Skip to content

Commit

Permalink
the one ps proto (#41659)
Browse files Browse the repository at this point in the history
* the one ps proto

* the one ps proto

* fix

* fix

* fix

* fix windows ci

* fix windows ci

* add dependency

* add dependency
  • Loading branch information
esythan authored Apr 13, 2022
1 parent 27a91b1 commit b12af9e
Show file tree
Hide file tree
Showing 7 changed files with 51 additions and 252 deletions.
20 changes: 19 additions & 1 deletion paddle/fluid/distributed/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,11 +1,29 @@
add_subdirectory(collective)
add_subdirectory(store)

# Compile the_one_ps.proto to Python and stage the generated module under
# python/paddle/distributed/fleet/proto so the fleet package can import it.
if(WITH_PYTHON)
  py_proto_compile(ps_py_proto SRCS the_one_ps.proto)
  # Ensure the destination package directory exists before the copy below runs.
  add_custom_target(ps_py_proto_init ALL
    COMMAND ${CMAKE_COMMAND} -E make_directory ${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto)
  # Use the portable `cmake -E copy` instead of platform-specific `mv` / `copy /Y`.
  # This removes the WIN32 branch and the backslash path re-escaping entirely,
  # and behaves identically on every generator. (The Unix branch previously
  # moved instead of copying; the leftover generated file in the build dir is
  # regenerated by py_proto_compile on each build, so copying is equivalent.)
  add_custom_command(TARGET ps_py_proto POST_BUILD
    COMMAND ${CMAKE_COMMAND} -E copy the_one_ps_pb2.py ${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto/
    COMMENT "Copy generated python the_one_ps_pb2 into directory ${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto."
    VERBATIM)
endif()

# Without the parameter-server core (WITH_PSCORE=OFF) only the fleet executor
# is built; bail out early so the PS proto/targets below are never configured.
if(NOT WITH_PSCORE)
add_subdirectory(fleet_executor)
return()
endif()

# the_one_ps.proto is the single source of truth for the PS framework proto.
# Register it exactly once — a second proto_library() call with the same
# target name (the old `SRCS ps.proto` line) is a duplicate-target error.
# The generated files are renamed to the legacy ps.pb.* names that the C++
# sources still #include; `cmake -E rename` is used instead of `mv` so the
# command is portable across generators/platforms.
proto_library(ps_framework_proto SRCS the_one_ps.proto)
add_custom_command(TARGET ps_framework_proto POST_BUILD
  COMMAND ${CMAKE_COMMAND} -E rename the_one_ps.pb.h ps.pb.h
  COMMAND ${CMAKE_COMMAND} -E rename the_one_ps.pb.cc ps.pb.cc
  VERBATIM)

# Warning demotions applied to distributed PS sources — NOTE(review): these
# presumably tolerate diagnostics tripped by generated/legacy code in this
# subtree; confirm before tightening. Kept as a single flags string for
# consistency with how sibling CMake files consume DISTRIBUTE_COMPILE_FLAGS.
set(DISTRIBUTE_COMPILE_FLAGS "-Wno-error=unused-value -Wno-non-virtual-dtor -Wno-error=non-virtual-dtor -Wno-error=delete-non-virtual-dtor -Wno-error=sign-compare -Wno-error=unused-variable -Wno-error=return-type -Wno-error=unused-but-set-variable -Wno-error=type-limits -Wno-error=unknown-pragmas -Wno-error=parentheses -Wno-error=unused-result")

Expand Down
236 changes: 0 additions & 236 deletions paddle/fluid/distributed/ps.proto

This file was deleted.

23 changes: 23 additions & 0 deletions paddle/fluid/framework/the_one_ps.proto → paddle/fluid/distributed/the_one_ps.proto
100755 → 100644
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,7 @@ message TableParameter {
optional CommonAccessorParameter common = 6;
optional TableType type = 7;
optional bool compress_in_save = 8 [ default = false ];
optional GraphParameter graph_parameter = 9;
}

message TableAccessorParameter {
Expand Down Expand Up @@ -211,3 +212,25 @@ message SparseAdamSGDParameter { // SparseAdamSGDRule
optional double ada_epsilon = 5 [ default = 1e-08 ];
repeated float weight_bounds = 6;
}

// Configuration for the graph-engine side of the parameter server.
// Field meanings beyond the visible defaults are defined by the C++
// consumers; lines marked NOTE(review) are assumptions to confirm there.
message GraphParameter {
// Worker-thread pool size for graph tasks (default 24).
optional int32 task_pool_size = 1 [ default = 24 ];
// NOTE(review): presumably enables the GPU-PS sampling path — confirm.
optional bool gpups_mode = 2 [ default = false ];
// Class name of the sampler used in gpups mode.
optional string gpups_graph_sample_class = 3
[ default = "CompleteGraphSampler" ];
// Free-form argument string forwarded to the sampler.
optional string gpups_graph_sample_args = 4 [ default = "" ];
// Sample-cache controls: off by default; ttl NOTE(review): unit not
// visible here (seconds? rounds?) — confirm against the cache impl.
optional bool use_cache = 5 [ default = false ];
optional int32 cache_size_limit = 6 [ default = 100000 ];
optional int32 cache_ttl = 7 [ default = 5 ];
// Node feature schema (see GraphFeature).
optional GraphFeature graph_feature = 8;
optional string table_name = 9 [ default = "" ];
optional string table_type = 10 [ default = "" ];
// Shard count for the graph table (default 127) and number of GPUs.
optional int32 shard_num = 11 [ default = 127 ];
optional int32 gpu_num = 12 [ default = 1 ];
}

// Node-feature schema. NOTE(review): the three repeated fields appear to be
// parallel arrays — name[i]/dtype[i]/shape[i] describing one feature each —
// confirm against the graph-table consumer before relying on this.
message GraphFeature {
repeated string name = 1;
repeated string dtype = 2;
repeated int32 shape = 3;
}
5 changes: 1 addition & 4 deletions paddle/fluid/framework/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -237,21 +237,19 @@ if(WITH_PYTHON)
py_proto_compile(trainer_py_proto SRCS trainer_desc.proto data_feed.proto)
py_proto_compile(distributed_strategy_py_proto SRCS distributed_strategy.proto)
py_proto_compile(pass_desc_py_proto SRCS pass_desc.proto)
py_proto_compile(ps_py_proto SRCS the_one_ps.proto)
#Generate an empty \
#__init__.py to make framework_py_proto as a valid python module.
add_custom_target(fleet_proto_init ALL
COMMAND ${CMAKE_COMMAND} -E make_directory ${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto
COMMAND ${CMAKE_COMMAND} -E touch ${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto/__init__.py
)
add_custom_target(framework_py_proto_init ALL COMMAND ${CMAKE_COMMAND} -E touch __init__.py)
add_dependencies(framework_py_proto framework_py_proto_init trainer_py_proto distributed_strategy_py_proto fleet_proto_init pass_desc_py_proto ps_py_proto)
add_dependencies(framework_py_proto framework_py_proto_init trainer_py_proto distributed_strategy_py_proto fleet_proto_init pass_desc_py_proto ps_py_proto ps_py_proto_init)
if (NOT WIN32)
add_custom_command(TARGET framework_py_proto POST_BUILD
COMMAND ${CMAKE_COMMAND} -E make_directory ${PADDLE_BINARY_DIR}/python/paddle/fluid/proto
COMMAND cp *.py ${PADDLE_BINARY_DIR}/python/paddle/fluid/proto/
COMMAND cp distributed_strategy_*.py ${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto
COMMAND cp the_one_ps_pb2.py ${PADDLE_BINARY_DIR}/python/paddle/distributed/fleet/proto
COMMENT "Copy generated python proto into directory paddle/fluid/proto."
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})
add_custom_target(fleet_executor_proto_init ALL DEPENDS fleet_proto_init fleet_executor_desc_py_proto
Expand All @@ -263,7 +261,6 @@ if(WITH_PYTHON)
add_custom_command(TARGET framework_py_proto POST_BUILD
COMMAND ${CMAKE_COMMAND} -E make_directory ${PADDLE_BINARY_DIR}/python/paddle/fluid/proto
COMMAND copy /Y *.py ${proto_dstpath}
COMMAND copy /Y the_one_ps_pb2.py ${fleet_proto_dstpath}
COMMAND copy /Y distributed_strategy_*.py ${fleet_proto_dstpath}
COMMENT "Copy generated python proto into directory paddle/fluid/proto."
COMMENT "Copy generated python proto into directory paddle/distributed/fleet/proto."
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/framework/distributed_strategy.proto
Original file line number Diff line number Diff line change
Expand Up @@ -310,7 +310,7 @@ message DistributedStrategy {
optional bool asp = 33 [ default = false ];
optional bool fuse_grad_merge = 34 [ default = false ];
optional bool semi_auto = 35 [ default = false ];
optional bool adam_d2sum = 36 [ default = true ];
optional bool adam_d2sum = 36 [ default = false ];
optional bool auto_search = 37 [ default = false ];
optional bool heter_ccl_mode = 38 [ default = false ];

Expand Down
4 changes: 2 additions & 2 deletions python/paddle/distributed/fleet/base/distributed_strategy.py
Original file line number Diff line number Diff line change
Expand Up @@ -404,7 +404,7 @@ def trainer_desc_configs(self):
def adam_d2sum(self):
"""
set adam_d2sum
Default value: True
Default value: False
Examples:
Expand All @@ -415,7 +415,7 @@ def adam_d2sum(self):
fleet.init(role_maker)
strategy = fleet.DistributedStrategy()
strategy.adam_d2sum = True # by default this is True
strategy.adam_d2sum = True # by default this is False
# code block for defining loss and local optimizer
# sgd = fleet.distributed_optimizer(optimizer, strategy)
Expand Down
Loading

0 comments on commit b12af9e

Please sign in to comment.