Skip to content

Commit

Permalink
[CodeStyle][E266] remove multiple '#' in comments (#47772)
Browse files Browse the repository at this point in the history
* fix flake8 CodeStyle E266

* fix comments
  • Loading branch information
caolonghao authored Nov 9, 2022
1 parent a97b363 commit 8c8cf0f
Show file tree
Hide file tree
Showing 37 changed files with 159 additions and 158 deletions.
30 changes: 15 additions & 15 deletions paddle/fluid/eager/auto_code_generator/generator/codegen_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@
import yaml
import re

########################
### Global Variables ###
########################
####################
# Global Variables #
####################
ops_to_fill_zero_for_empty_grads = set(
[
"split_grad",
Expand Down Expand Up @@ -95,9 +95,9 @@
}


#############################
### File Reader Helpers ###
#############################
#########################
# File Reader Helpers #
#########################
def AssertMessage(lhs_str, rhs_str):
    """Build a comparison-failure message that shows both operands."""
    message = f"lhs: {lhs_str}, rhs: {rhs_str}"
    return message

Expand Down Expand Up @@ -127,9 +127,9 @@ def ReadBwdFile(filepath):
return ret


##################################
### Generic Helper Functions ###
##################################
##############################
# Generic Helper Functions #
##############################
def FindGradName(string):
    """Derive the gradient-variable name by appending the ``_grad`` suffix."""
    return f"{string}_grad"

Expand Down Expand Up @@ -252,9 +252,9 @@ def GetIndent(num):
return "".join([tab for i in range(num)])


######################
### Yaml Parsers ###
######################
##################
# Yaml Parsers #
##################
def ParseYamlArgs(string):
# Example: const Tensor& x, const Tensor& y, bool transpose_x, bool transpose_y

Expand Down Expand Up @@ -398,9 +398,9 @@ def ParseYamlInplaceInfo(string):
return inplace_map


########################
### Generator Base ###
########################
####################
# Generator Base #
####################
class FunctionGeneratorBase:
def __init__(self, forward_api_contents, namespace):
self.forward_api_contents = forward_api_contents
Expand Down
54 changes: 27 additions & 27 deletions paddle/fluid/eager/auto_code_generator/generator/eager_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,9 +54,9 @@
]


###########
## Utils ##
###########
#########
# Utils #
#########
def ParseArguments():
parser = argparse.ArgumentParser(
description='Eager Code Generator Args Parser'
Expand All @@ -72,9 +72,9 @@ def ParseArguments():
return args


########################
## Code Gen Templates ##
########################
######################
# Code Gen Templates #
######################
SET_PLAIN_TENSOR_WRAPPER_TEMPLATE = """ void SetTensorWrapper{}(const paddle::experimental::Tensor& {}) {{
{} = egr::TensorWrapper({}, {});
}}
Expand Down Expand Up @@ -479,9 +479,9 @@ def IsInvokeForwardApi(api_contents, forward_api_name_list):
)


#######################
## Generator Helpers ##
#######################
#####################
# Generator Helpers #
#####################
def GenerateCoreOpInfoDeclaration():
    """Return the code-gen template holding the core-ops info declarations."""
    declaration_str = CORE_OPS_DECLARATION_TEMPLATE
    return declaration_str

Expand Down Expand Up @@ -517,9 +517,9 @@ def GenerateCoreOpInfoDefinition():
return core_ops_info_definition_str


#####################
## Generator Class ##
#####################
###################
# Generator Class #
###################
class DygraphFunctionGeneratorBase(FunctionGeneratorBase):
def __init__(
self,
Expand Down Expand Up @@ -1033,9 +1033,9 @@ def run(self):
# Basic Validation Check
self.DygraphYamlValidationCheck()

##########################
## Parsing Raw Contents ##
##########################
########################
# Parsing Raw Contents #
########################
# Parse forward and backward inplace_map
self.ParseForwardInplaceInfo()
if self.grad_api_contents is not None:
Expand Down Expand Up @@ -1066,9 +1066,9 @@ def run(self):
# Forwards Validation Check
self.ForwardsValidationCheck()

#############################
## Process Parsed Contents ##
#############################
###########################
# Process Parsed Contents #
###########################
# Initialize forward_inputs_position_map, forward_outputs_position_map
self.DetermineForwardPositionMap(
self.forward_inputs_list, self.forward_returns_list
Expand Down Expand Up @@ -1711,9 +1711,9 @@ def UpdateCoreOpsInformation(self, is_inplaced):
def run(self):
super().run()

#####################
## Code Generation ##
#####################
###################
# Code Generation #
###################

# Definition And Declaration
self.GenerateForwardDefinitionAndDeclaration(is_inplaced=False)
Expand Down Expand Up @@ -2341,9 +2341,9 @@ def run(self):

self.ResetOptionalInputs()

#####################
## Code Generation ##
#####################
###################
# Code Generation #
###################
# Higher-order GradNode generation
(
has_higher_order_node,
Expand Down Expand Up @@ -2503,9 +2503,9 @@ def run(self):
self.GenerateCode()


##################
## File Writers ##
##################
################
# File Writers #
################
def GenerateNodeCCFile(filepath, node_definition_str):
if os.path.exists(filepath):
os.remove(filepath)
Expand Down
24 changes: 12 additions & 12 deletions paddle/fluid/eager/auto_code_generator/generator/python_c_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,9 @@
from codegen_utils import GetForwardFunctionName, IsVectorTensorType
from codegen_utils import GetInplacedFunctionName

###########################
## Global Configurations ##
###########################
#########################
# Global Configurations #
#########################
skipped_forward_api_names = set([])


Expand Down Expand Up @@ -58,9 +58,9 @@ def FindParsingFunctionFromAttributeType(atype):
return atype_to_parsing_function[atype]


##########################
## Refactored Functions ##
##########################
########################
# Refactored Functions #
########################
PARSE_PYTHON_C_TENSORS_TEMPLATE = (
" auto {} = {}(\"{}\", \"{}\", args, {}, {});\n"
)
Expand Down Expand Up @@ -234,9 +234,9 @@ def FindParsingFunctionFromAttributeType(atype):
"""


#######################
## Generator Classes ##
#######################
#####################
# Generator Classes #
#####################
class PythonCSingleFunctionGenerator(FunctionGeneratorBase):
def __init__(self, forward_api_contents, namespace):
# Members from Parent:
Expand Down Expand Up @@ -565,9 +565,9 @@ def run(self):
self.AttachNamespace()


############################
## Code Generation Helper ##
############################
##########################
# Code Generation Helper #
##########################
def ParseArguments():
parser = argparse.ArgumentParser(
description='Eager Code Generator Args Parser'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@
"equal": "EqualParser",
"expand": "ExpandParser",
"squeeze2": "SqueezeParser",
## backwords
# backwards
"matmul_grad": "MatMulGradParser",
"mul_grad": "MulGradParser",
"relu_grad": "ReluGradParser",
Expand All @@ -93,7 +93,7 @@
"gather_grad": "GatherGradParser",
"transpose2_grad": "TransposeGradParser",
"layer_norm_grad": "LayerNormGradParser",
## opt
# opt
"sgd": "SGDParser",
# "adam": "AdamParser",
}
Expand Down Expand Up @@ -445,7 +445,7 @@ def _apply(self):
return [min_out], [[0]]


## cal
# cal
class LogParser(AscendParserBase):
def __init__(self, graph, var2geop):
super().__init__(graph, var2geop)
Expand Down Expand Up @@ -605,7 +605,7 @@ def _apply(self):
# return [increment]


## matrix cal
# matrix cal
class MatMulParser(AscendParserBase):
def __init__(self, graph, var2geop):
super().__init__(graph, var2geop)
Expand Down Expand Up @@ -803,7 +803,7 @@ def _apply(self):
return [y, mean, variance], [[1], [2], [0]]


## activate function
# activate function
class ReluParser(AscendParserBase):
def __init__(self, graph, var2geop):
super().__init__(graph, var2geop)
Expand Down Expand Up @@ -843,7 +843,7 @@ def _apply(self):
return [tanh], [[0]]


## loss function
# loss function
class SoftmaxWithCrossEntropyParser(AscendParserBase):
def __init__(self, graph, var2geop):
super().__init__(graph, var2geop)
Expand Down Expand Up @@ -932,7 +932,7 @@ def _apply(self):
return [softmax], [[0]]


## general
# general
class ShapeParser(AscendParserBase):
def __init__(self, graph, var2geop):
super().__init__(graph, var2geop)
Expand Down Expand Up @@ -1038,7 +1038,7 @@ def _apply(self):
.set_attr_int32("seed", 0)
)

## wirte the output of truncatedNormal from startup_program to main_program
# write the output of truncatedNormal from startup_program to main_program
if self.op.block.var(self.op.output('Out')[0]).persistable:
# print("%s is Persistable in truncated_normal" %
# (self.op.output('Out')[0]))
Expand Down Expand Up @@ -1524,7 +1524,7 @@ def _apply(self):
return [shape, output], [[1], [0]]


## parallel
# parallel
class AllGatherParser(AscendParserBase):
def __init__(self, graph, var2geop):
super().__init__(graph, var2geop)
Expand Down Expand Up @@ -1821,7 +1821,7 @@ def _apply(self):
# *************************** *************************#
# *************************** *************************#
# ****************************************************************#
## grad
# grad
class ReduceSumGradParser(AscendParserBase):
def __init__(self, graph, var2geop):
super().__init__(graph, var2geop)
Expand Down
4 changes: 2 additions & 2 deletions python/paddle/distributed/launch/controllers/collective.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def _build_pod_with_args(self):
"PADDLE_GLOBAL_RANK": "{}".format(i + rank_offset),
"PADDLE_LOCAL_RANK": "{}".format(i),
"PADDLE_NNODES": "{}".format(len(ips)),
## compatible env
# compatible env
"PADDLE_TRAINER_ENDPOINTS": ",".join(job_endpoints),
"PADDLE_CURRENT_ENDPOINT": job_endpoints[i + rank_offset],
"PADDLE_TRAINER_ID": "{}".format(i + rank_offset),
Expand Down Expand Up @@ -157,7 +157,7 @@ def _build_pod_with_master(self):
"PADDLE_GLOBAL_RANK": "{}".format(i + rank_offset),
"PADDLE_LOCAL_RANK": "{}".format(i),
"PADDLE_NNODES": "{}".format(self.job.replicas),
## compatible env
# compatible env
"PADDLE_TRAINER_ENDPOINTS": ",".join(job_endpoints),
"PADDLE_CURRENT_ENDPOINT": endpoints[i],
"PADDLE_TRAINER_ID": "{}".format(i + rank_offset),
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/distributed/passes/ps_trainer_pass.py
Original file line number Diff line number Diff line change
Expand Up @@ -1238,7 +1238,7 @@ def _create_trainer_program(
attrs=attrs,
)

## TODO add check for bp block
# TODO add check for bp block
# check_op_device(program.global_block(), DEFAULT_DEVICE)

def _apply_single_impl(self, main_program, startup_program, pass_ctx):
Expand Down
10 changes: 5 additions & 5 deletions python/paddle/distributed/ps/utils/public.py
Original file line number Diff line number Diff line change
Expand Up @@ -710,7 +710,7 @@ def _append_heter_op(op, current_heter_block_ops, heter_ops):
):
param_name = op.input(SPARSE_OP_TYPE_DICT[forward_op_type])[0]
if param_name in var2idx:
## insert sum op & remove sum op from var2idx and origin place
# insert sum op & remove sum op from var2idx and origin place
op_list = list(block.ops)
sum_op = op_list[var2idx[param_name]]
sum_op_inputs = {
Expand Down Expand Up @@ -979,7 +979,7 @@ def find_entrance_exit_private(program, program_block_ops_list):
block_var_detail = []
persistables = []
for index, block_op_list in enumerate(program_block_ops_list):
## forward
# forward
block_input, block_output = find_ops_list_input_output(
program, block_op_list["forward"]
)
Expand All @@ -999,7 +999,7 @@ def find_entrance_exit_private(program, program_block_ops_list):
}
}

## backward
# backward
bp_block_input, bp_block_output = find_ops_list_input_output(
program, block_op_list["backward"]
)
Expand Down Expand Up @@ -1115,7 +1115,7 @@ def entrance_exit_check(
def delete_block_useless_exit(
program, program_block_ops_list, block_var_detail
):
## forward
# forward
for index in range(len(block_var_detail)):
if index == len(block_var_detail) - 1:
break
Expand All @@ -1128,7 +1128,7 @@ def delete_block_useless_exit(

for var in need_delete_var:
current_block_exit.remove(var)
## backward
# backward
for index in range(len(block_var_detail) - 1, -1, -1):
if index - 1 < 0:
break
Expand Down
Loading

0 comments on commit 8c8cf0f

Please sign in to comment.