
Commit

Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into develop
zhwesky2010 committed Jun 14, 2022
2 parents bcf3bfb + c62a7e2 commit e964b42
Showing 252 changed files with 7,181 additions and 1,726 deletions.
63 changes: 1 addition & 62 deletions .pre-commit-config.yaml
@@ -32,7 +32,7 @@ repos:
description: Format files with ClangFormat.
entry: bash ./tools/codestyle/clang_format.hook -i
language: system
files: \.(c|cc|cxx|cpp|cu|h|hpp|hxx|proto|xpu|kps)$
files: \.(c|cc|cxx|cpp|cu|h|hpp|hxx|xpu|kps)$
exclude: |
(?x)^(
paddle/fluid/distributed/ps/thirdparty/round_robin.h
@@ -81,64 +81,3 @@ repos:
- id: cmakelint
args: [--config=./tools/codestyle/.cmakelintrc]
# exclude files which need to be fixed
exclude: |
(?x)^(
CMakeLists.txt|
python/paddle/fluid/tests/unittests/CMakeLists.txt|
paddle/fluid/inference/tests/infer_ut/CMakeLists.txt|
cmake/configure.cmake|
paddle/fluid/inference/api/demo_ci/CMakeLists.txt|
cmake/flags.cmake|
cmake/inference_lib.cmake|
cmake/external/protobuf.cmake|
paddle/fluid/framework/fleet/CMakeLists.txt|
paddle/fluid/inference/CMakeLists.txt|
paddle/fluid/inference/tests/api/CMakeLists.txt|
paddle/fluid/operators/CMakeLists.txt|
cmake/external/lite.cmake|
cmake/external/poplar.cmake|
cmake/python_module.cmake|
python/paddle/fluid/tests/unittests/asp/CMakeLists.txt|
cmake/cuda.cmake|
cmake/FindNumPy.cmake|
cmake/coveralls.cmake|
cmake/external/glog.cmake|
cmake/external/onnxruntime.cmake|
cmake/external/openblas.cmake|
cmake/external/xpu.cmake|
cmake/hip.cmake|
paddle/fluid/inference/analysis/ir_passes/CMakeLists.txt|
paddle/fluid/inference/api/CMakeLists.txt|
paddle/fluid/operators/controlflow/CMakeLists.txt|
python/paddle/fluid/tests/unittests/distributed_passes/CMakeLists.txt|
cmake/operators.cmake|
cmake/tensorrt.cmake|
paddle/fluid/inference/api/details/CMakeLists.txt|
python/paddle/fluid/tests/unittests/xpu/CMakeLists.txt|
cmake/external/arm_brpc.cmake|
cmake/external/concurrentqueue.cmake|
cmake/external/eigen.cmake|
cmake/external/mklml.cmake|
cmake/external/paddle2onnx.cmake|
cmake/miopen.cmake|
cmake/nccl.cmake|
cmake/simd.cmake|
paddle/fluid/inference/analysis/CMakeLists.txt|
paddle/fluid/inference/tests/infer_ut/external-cmake/gtest-cpp.cmake|
paddle/fluid/memory/allocation/CMakeLists.txt|
paddle/fluid/memory/CMakeLists.txt|
paddle/fluid/operators/cinn/CMakeLists.txt|
paddle/infrt/external_kernels/CMakeLists.txt|
paddle/infrt/kernel/phi/CMakeLists.txt|
python/paddle/fluid/contrib/slim/tests/CMakeLists.txt|
python/paddle/fluid/tests/unittests/autograd/CMakeLists.txt|
python/paddle/fluid/tests/unittests/distribution/CMakeLists.txt|
python/paddle/fluid/tests/unittests/dygraph_to_static/CMakeLists.txt|
python/paddle/fluid/tests/unittests/fft/CMakeLists.txt|
python/paddle/fluid/tests/unittests/ipu/CMakeLists.txt|
python/paddle/fluid/tests/unittests/mkldnn/CMakeLists.txt|
python/paddle/fluid/tests/unittests/npu/CMakeLists.txt|
python/paddle/fluid/tests/unittests/ps/CMakeLists.txt|
python/paddle/fluid/tests/unittests/rnn/CMakeLists.txt|
python/paddle/fluid/tests/unittests/sequence/CMakeLists.txt
)$
22 changes: 11 additions & 11 deletions CMakeLists.txt
@@ -16,10 +16,10 @@ if(APPLE AND WITH_ARM)
# cmake 3.19.2 version starts to support M1
cmake_minimum_required(VERSION 3.19.2)
cmake_policy(VERSION 3.19.2)
else(APPLE AND WITH_ARM)
else()
cmake_minimum_required(VERSION 3.15)
cmake_policy(VERSION 3.10)
endif(APPLE AND WITH_ARM)
endif()
# use to get_property location of static lib
# https://cmake.org/cmake/help/v3.0/policy/CMP0026.html?highlight=cmp0026
cmake_policy(SET CMP0026 OLD)
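Much of the CMake churn in this commit is the same mechanical cleanup: the optional repeated condition is dropped from `else()`, `endif()`, and `endforeach()`. A minimal before/after sketch of the style, using the condition from the hunk above:

```cmake
# Old style: the closers repeat the condition, which is legal but noisy
# and easy to let drift out of sync with the if().
if(APPLE AND WITH_ARM)
  cmake_minimum_required(VERSION 3.19.2)
else(APPLE AND WITH_ARM)
  cmake_minimum_required(VERSION 3.15)
endif(APPLE AND WITH_ARM)

# New style after this commit: bare closers.
if(APPLE AND WITH_ARM)
  cmake_minimum_required(VERSION 3.19.2)
else()
  cmake_minimum_required(VERSION 3.15)
endif()
```

The condition argument on the closing commands is legacy syntax that CMake does not need, so dropping it changes nothing about how the block is evaluated.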
@@ -152,7 +152,7 @@ if(WIN32)
if(${flag_var} MATCHES "/MD")
string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
endif()
endforeach(flag_var)
endforeach()
endif()

# NOTE(zhouwei): msvc max/min macro conflict with std::min/max, define NOMINMAX globally
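The `/MD` to `/MT` rewrite above is the usual way to force MSVC's static C runtime across the per-configuration flag variables. A condensed, hedged sketch of the pattern; the flag list is abbreviated, and the `MSVC_STATIC_CRT` guard is borrowed from a later hunk rather than visible here. On CMake 3.15+ the built-in `CMAKE_MSVC_RUNTIME_LIBRARY` variable is an alternative to string surgery:

```cmake
if(WIN32 AND MSVC_STATIC_CRT)
  foreach(flag_var
      CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
      CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE)
    if(${flag_var} MATCHES "/MD")
      # Swap the DLL runtime (/MD) for the static runtime (/MT).
      string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
    endif()
  endforeach()
endif()
```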
@@ -179,10 +179,10 @@ if(WIN32)
math(EXPR PROCESS_MAX "${CPU_CORES} * 2 / 3")
set(${flag_var} "${${flag_var}} /MP${PROCESS_MAX}")
endif()
endforeach(flag_var)
endforeach()
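The `/MP` job count is plain flag arithmetic: two thirds of the detected core count. A sketch of how such a value can be computed and applied; `ProcessorCount` is a standard CMake module, and whether Paddle actually derives `CPU_CORES` this way is not visible in the hunk:

```cmake
include(ProcessorCount)
ProcessorCount(CPU_CORES)  # yields 0 if detection fails
if(CPU_CORES GREATER 0)
  math(EXPR PROCESS_MAX "${CPU_CORES} * 2 / 3")
  # /MP<N> asks MSVC to compile up to N translation units in parallel.
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /MP${PROCESS_MAX}")
endif()
```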
foreach(flag_var CMAKE_CXX_FLAGS CMAKE_C_FLAGS)
set(${flag_var} "${${flag_var}} /w")
endforeach(flag_var)
endforeach()

# Windows Remove /Zi, /ZI for Release, MinSizeRel builds
foreach(flag_var
@@ -191,7 +191,7 @@ if(WIN32)
if(${flag_var} MATCHES "/Z[iI]")
string(REGEX REPLACE "/Z[iI]" "" ${flag_var} "${${flag_var}}")
endif()
endforeach(flag_var)
endforeach()

set(CMAKE_C_FLAGS
"${CMAKE_C_FLAGS} /wd4068 /wd4129 /wd4244 /wd4267 /wd4297 /wd4530 /wd4577 /wd4819 /wd4838"
@@ -207,7 +207,7 @@ if(WIN32)
if(MSVC_STATIC_CRT)
set(${flag_var} "${${flag_var}} /NODEFAULTLIB:MSVCRT.LIB")
endif()
endforeach(flag_var)
endforeach()

if(WITH_WIN_DUMP_DBG)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /Zi")
@@ -216,16 +216,16 @@ if(WIN32)
foreach(flag_var CMAKE_SHARED_LINKER_FLAGS CMAKE_STATIC_LINKER_FLAGS
CMAKE_EXE_LINKER_FLAGS CMAKE_LINKER_FLAGS)
set(${flag_var} "${${flag_var}} /DEBUG /OPT:REF /OPT:ICF")
endforeach(flag_var)
endforeach()

add_definitions("-DWITH_WIN_DUMP_DBG")
endif()

else(WIN32)
else()
set(CMAKE_CXX_FLAGS
"${CMAKE_CXX_FLAGS} -Wno-error=deprecated-declarations -Wno-deprecated-declarations"
)
endif(WIN32)
endif()

find_package(Git REQUIRED)

@@ -430,7 +430,7 @@ endif()
if(WITH_ROCM)
include(hip)
include(miopen) # set miopen libraries, must before configure
endif(WITH_ROCM)
endif()

if(WITH_XPU_KP)
include(xpu_kp)
4 changes: 2 additions & 2 deletions README.md
@@ -15,7 +15,7 @@ English | [简体中文](./README_cn.md)
Welcome to the PaddlePaddle GitHub.

PaddlePaddle, as the first independent R&D deep learning platform in China, has been officially open-sourced to professional communities since 2016. It is an industrial platform with advanced technologies and rich features that cover core deep learning frameworks, basic model libraries, end-to-end development kits, tools & components as well as service platforms.
PaddlePaddle is originated from industrial practices with dedication and commitments to industrialization. It has been widely adopted by a wide range of sectors including manufacturing, agriculture, enterprise service, and so on while serving more than 4 million developers, 157,000 companies and generating 476,000 models. With such advantages, PaddlePaddle has helped an increasing number of partners commercialize AI.
PaddlePaddle is originated from industrial practices with dedication and commitments to industrialization. It has been widely adopted by a wide range of sectors including manufacturing, agriculture, enterprise service, and so on while serving more than 4.7 million developers, 180,000 companies and generating 560,000 models. With such advantages, PaddlePaddle has helped an increasing number of partners commercialize AI.


## Installation
@@ -85,7 +85,7 @@ We provide [English](https://www.paddlepaddle.org.cn/documentation/docs/en/guide

- [Github Issues](https://github.com/PaddlePaddle/Paddle/issues): bug reports, feature requests, install issues, usage issues, etc.
- QQ discussion group: 441226485 (PaddlePaddle).
- [Forums](https://ai.baidu.com/forum/topic/list/168?pageNo=1): discuss implementations, research, etc.
- [Forums](https://aistudio.baidu.com/paddle/forum): discuss implementations, research, etc.

## Courses

6 changes: 3 additions & 3 deletions README_cn.md
@@ -15,11 +15,11 @@

Welcome to the PaddlePaddle GitHub

PaddlePaddle (飞桨), built on Baidu's years of deep learning research and business applications, is China's first self-developed, feature-complete, open-source industrial-grade deep learning platform. It integrates core deep learning training and inference frameworks, basic model libraries, end-to-end development kits, and a rich set of tool components. To date, PaddlePaddle has accumulated 4.06 million developers, served 157,000 companies, and generated 476,000 models on its open-source deep learning platform. PaddlePaddle helps developers quickly turn AI ideas into reality and bring AI services online, empowering more and more industries with AI and driving intelligent industrial upgrading.
PaddlePaddle (飞桨), built on Baidu's years of deep learning research and business applications, is China's first self-developed, feature-complete, open-source industrial-grade deep learning platform. It integrates core deep learning training and inference frameworks, basic model libraries, end-to-end development kits, and a rich set of tool components. To date, PaddlePaddle has accumulated 4.77 million developers, served 180,000 companies, and generated 560,000 models on its open-source deep learning platform. PaddlePaddle helps developers quickly turn AI ideas into reality and bring AI services online, empowering more and more industries with AI and driving intelligent industrial upgrading.

## Installation

### Latest PaddlePaddle version: [v2.2](https://github.com/PaddlePaddle/Paddle/tree/release/2.2)
### Latest PaddlePaddle version: [v2.3](https://github.com/PaddlePaddle/Paddle/tree/release/2.3)

To follow the latest PaddlePaddle features, please refer to our [release notes](https://github.com/PaddlePaddle/Paddle/releases)

@@ -83,7 +83,7 @@ PaddlePaddle users can claim **free Tesla V100 online computing resources** to train models

- You are welcome to submit questions, bug reports, and suggestions via [Github Issues](https://github.com/PaddlePaddle/Paddle/issues)
- QQ discussion group: 441226485 (PaddlePaddle)
- [Forums](https://ai.baidu.com/forum/topic/list/168): everyone is welcome to share problems and experiences encountered while using PaddlePaddle, and to help keep the forum atmosphere friendly
- [Forums](https://aistudio.baidu.com/paddle/forum): everyone is welcome to share problems and experiences encountered while using PaddlePaddle, and to help keep the forum atmosphere friendly

## Courses

4 changes: 2 additions & 2 deletions cmake/FindNumPy.cmake
@@ -26,7 +26,7 @@ if(PYTHON_EXECUTABLE)
OUTPUT_VARIABLE NUMPY_PATH)
elseif(_numpy_out)
message(STATUS "Python executable not found.")
endif(PYTHON_EXECUTABLE)
endif()

find_path(PYTHON_NUMPY_INCLUDE_DIR numpy/arrayobject.h
HINTS "${NUMPY_PATH}" "${PYTHON_INCLUDE_PATH}")
Expand All @@ -35,7 +35,7 @@ if(PYTHON_NUMPY_INCLUDE_DIR)
set(PYTHON_NUMPY_FOUND
1
CACHE INTERNAL "Python numpy found")
endif(PYTHON_NUMPY_INCLUDE_DIR)
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(NumPy DEFAULT_MSG PYTHON_NUMPY_INCLUDE_DIR)
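For context, a find module like this is consumed through `find_package`. A minimal usage sketch, assuming `cmake/` is on `CMAKE_MODULE_PATH` (as is typical in this repository) and using a hypothetical target name:

```cmake
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
find_package(NumPy)  # runs the FindNumPy.cmake shown above

if(PYTHON_NUMPY_FOUND)
  # my_py_extension is a placeholder target, not one defined in this diff.
  target_include_directories(my_py_extension PRIVATE ${PYTHON_NUMPY_INCLUDE_DIR})
endif()
```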
22 changes: 11 additions & 11 deletions cmake/configure.cmake
@@ -14,19 +14,19 @@

if(NOT WITH_PYTHON)
add_definitions(-DPADDLE_NO_PYTHON)
endif(NOT WITH_PYTHON)
endif()

if(WITH_TESTING)
add_definitions(-DPADDLE_WITH_TESTING)
endif(WITH_TESTING)
endif()

if(WITH_INFERENCE_API_TEST)
add_definitions(-DPADDLE_WITH_INFERENCE_API_TEST)
endif(WITH_INFERENCE_API_TEST)
endif()

if(NOT WITH_PROFILER)
add_definitions(-DPADDLE_DISABLE_PROFILER)
endif(NOT WITH_PROFILER)
endif()
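configure.cmake is essentially a table that maps build options onto preprocessor macros via global `add_definitions` calls. A hedged sketch of the pattern; the option description and the target name are illustrative, not taken from this diff:

```cmake
option(WITH_TESTING "Compile with unit testing support" OFF)

if(WITH_TESTING)
  # Global definition, matching the style used throughout this file.
  add_definitions(-DPADDLE_WITH_TESTING)
  # Scoped alternative (my_lib is a placeholder target):
  # target_compile_definitions(my_lib PRIVATE PADDLE_WITH_TESTING)
endif()
```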

if(WITH_AVX AND AVX_FOUND)
set(SIMD_FLAG ${AVX_FLAG})
@@ -60,8 +60,8 @@ if(WIN32)
FATAL
"Windows build only supports MSVC, which is bound to the nvcc compiler of NVIDIA."
)
endif(NOT MSVC)
endif(WIN32)
endif()
endif()

if(WITH_MUSL)
add_definitions(-DPADDLE_WITH_MUSL)
@@ -195,9 +195,9 @@ if(WITH_MKLML AND MKLML_IOMP_LIB)
if(WIN32)
# openmp not support well for now on windows
set(OPENMP_FLAGS "")
else(WIN32)
else()
set(OPENMP_FLAGS "-fopenmp")
endif(WIN32)
endif()
set(CMAKE_C_CREATE_SHARED_LIBRARY_FORBIDDEN_FLAGS ${OPENMP_FLAGS})
set(CMAKE_CXX_CREATE_SHARED_LIBRARY_FORBIDDEN_FLAGS ${OPENMP_FLAGS})
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OPENMP_FLAGS}")
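The branch above hard-codes `-fopenmp` on non-Windows builds, presumably because MKLML ships its own Intel OpenMP runtime (iomp5). For comparison, the generic CMake route (CMake 3.9+), which would not be appropriate here if the Intel runtime must be pinned, looks like this:

```cmake
find_package(OpenMP)
if(OpenMP_CXX_FOUND)
  # The imported target carries the correct flag and runtime for the active compiler.
  # my_target is a placeholder, not a target defined in this diff.
  target_link_libraries(my_target PRIVATE OpenMP::OpenMP_CXX)
endif()
```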
@@ -221,15 +221,15 @@ endif()

if(WITH_BRPC_RDMA)
add_definitions(-DPADDLE_WITH_BRPC_RDMA)
endif(WITH_BRPC_RDMA)
endif()

if(ON_INFER)
add_definitions(-DPADDLE_ON_INFERENCE)
endif(ON_INFER)
endif()

if(WITH_CRYPTO)
add_definitions(-DPADDLE_WITH_CRYPTO)
endif(WITH_CRYPTO)
endif()

if(WITH_CUSTOM_DEVICE AND NOT WIN32)
add_definitions(-DPADDLE_WITH_CUSTOM_DEVICE)
2 changes: 1 addition & 1 deletion cmake/coveralls.cmake
@@ -96,7 +96,7 @@ if(WITH_COVERAGE)
if(NOT ${EXCLUDE_DIR_FOUND} EQUAL -1)
list(REMOVE_ITEM PADDLE_SOURCES ${TMP_PATH})
endif()
endforeach(TMP_PATH)
endforeach()
endforeach()
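The loop above prunes coverage sources: any path containing an excluded directory is removed from `PADDLE_SOURCES`. A hedged reconstruction of the full pattern; the `string(FIND ...)` step and the exclude-list variable name are inferred, since they sit outside the visible hunk:

```cmake
foreach(EXCLUDE_DIR ${COVERAGE_EXCLUDE_DIRS})  # list name assumed
  foreach(TMP_PATH ${PADDLE_SOURCES})
    string(FIND ${TMP_PATH} ${EXCLUDE_DIR} EXCLUDE_DIR_FOUND)
    if(NOT ${EXCLUDE_DIR_FOUND} EQUAL -1)
      list(REMOVE_ITEM PADDLE_SOURCES ${TMP_PATH})
    endif()
  endforeach()
endforeach()
```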

# convert to absolute path
4 changes: 2 additions & 2 deletions cmake/cuda.cmake
@@ -274,7 +274,7 @@ set(CMAKE_CUDA_STANDARD 14)
# So replace /W[1-4] with /W0
if(WIN32)
string(REGEX REPLACE "/W[1-4]" " /W0 " CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS}")
endif(WIN32)
endif()
# in cuda9, suppress cuda warning on eigen
set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} -w")
# Set :expt-relaxed-constexpr to suppress Eigen warnings
@@ -293,7 +293,7 @@ if(WIN32)
if(${flag_var} MATCHES "-MD")
string(REGEX REPLACE "-MD" "-MT" ${flag_var} "${${flag_var}}")
endif()
endforeach(flag_var)
endforeach()
endif()
endif()

3 changes: 2 additions & 1 deletion cmake/external/eigen.cmake
@@ -25,7 +25,8 @@ if(WIN32)
elseif(LINUX)
if(WITH_ROCM)
# For HIPCC Eigen::internal::device::numeric_limits is not EIGEN_DEVICE_FUNC
# which will cause compiler error of using __host__ function in __host__ __device__
# which will cause compiler error of using __host__ function
# in __host__ __device__
file(TO_NATIVE_PATH ${PADDLE_SOURCE_DIR}/patches/eigen/Meta.h native_src)
file(TO_NATIVE_PATH ${EIGEN_SOURCE_DIR}/Eigen/src/Core/util/Meta.h
native_dst)
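The two `file(TO_NATIVE_PATH ...)` calls prepare a patched `Meta.h` and its destination inside the Eigen source tree; the copy itself falls outside the visible hunk. A hedged sketch of the typical follow-up, with the copy command assumed rather than taken from this file:

```cmake
file(TO_NATIVE_PATH ${PADDLE_SOURCE_DIR}/patches/eigen/Meta.h native_src)
file(TO_NATIVE_PATH ${EIGEN_SOURCE_DIR}/Eigen/src/Core/util/Meta.h native_dst)
# Assumed follow-up: overwrite the upstream header with the patched copy,
# e.g. as the patch/update step of the Eigen ExternalProject.
set(EIGEN_PATCH_COMMAND
    ${CMAKE_COMMAND} -E copy_if_different ${native_src} ${native_dst})
```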
4 changes: 2 additions & 2 deletions cmake/external/glog.cmake
@@ -28,12 +28,12 @@ if(WIN32)
CACHE FILEPATH "glog library." FORCE)
set(GLOG_CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4267 /wd4530")
add_definitions("/DGOOGLE_GLOG_DLL_DECL=")
else(WIN32)
else()
set(GLOG_LIBRARIES
"${GLOG_INSTALL_DIR}/lib/libglog.a"
CACHE FILEPATH "glog library." FORCE)
set(GLOG_CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS})
endif(WIN32)
endif()

include_directories(${GLOG_INCLUDE_DIR})

Expand Down
8 changes: 5 additions & 3 deletions cmake/external/mklml.cmake
@@ -32,7 +32,8 @@ if(WIN32)
set(MKLML_SHARED_IOMP_LIB ${MKLML_LIB_DIR}/libiomp5md.dll)
else()
#TODO(intel-huying):
# Now enable csrmm function in mklml library temporarily, it will be updated as official version later.
# Now enable csrmm function in mklml library temporarily,
# it will be updated as official version later.
set(MKLML_VER
"csrmm_mklml_lnx_2019.0.5"
CACHE STRING "" FORCE)
@@ -51,8 +51,9 @@ message(STATUS "MKLML_VER: ${MKLML_VER}, MKLML_URL: ${MKLML_URL}")
set(MKLML_PREFIX_DIR ${THIRD_PARTY_PATH}/mklml)
set(MKLML_SOURCE_DIR ${THIRD_PARTY_PATH}/mklml/src/extern_mklml)

# Ninja Generator can not establish the correct dependency relationship between the imported library with target,
# the product file in the ExternalProject need to be specified manually, please refer to
# Ninja Generator can not establish the correct dependency relationship
# between the imported library with target, the product file
# in the ExternalProject need to be specified manually, please refer to
# https://stackoverflow.com/questions/54866067/cmake-and-ninja-missing-and-no-known-rule-to-make-it
# It is the same to all other ExternalProject.
ExternalProject_Add(
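The comment block explains why the external library's output files must be listed explicitly when the Ninja generator is used. A trimmed, hedged sketch of the shape of such an `ExternalProject_Add` call; the download, install, and byproduct details are placeholders, not the real mklml values:

```cmake
include(ExternalProject)

ExternalProject_Add(
  extern_mklml
  PREFIX               ${MKLML_PREFIX_DIR}
  URL                  ${MKLML_URL}
  DOWNLOAD_NO_PROGRESS 1
  CONFIGURE_COMMAND    ""
  BUILD_COMMAND        ""
  INSTALL_COMMAND      ""
  # Declare the files this step produces so Ninja can connect them to the
  # imported targets that reference them.
  BUILD_BYPRODUCTS     ${MKLML_LIB_DIR}/libmklml_intel.so)
```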
4 changes: 2 additions & 2 deletions cmake/external/openblas.cmake
@@ -58,7 +58,7 @@ if(NOT WIN32)
UPDATE_COMMAND ""
CONFIGURE_COMMAND ""
BUILD_BYPRODUCTS ${CBLAS_LIBRARIES})
else(NOT WIN32)
else()
set(CBLAS_LIBRARIES
"${CBLAS_INSTALL_DIR}/lib/openblas${CMAKE_STATIC_LIBRARY_SUFFIX}"
CACHE FILEPATH "openblas library." FORCE)
@@ -92,4 +92,4 @@ else(NOT WIN32)
BUILD_BYPRODUCTS ${CBLAS_LIBRARIES})
set(OPENBLAS_SHARED_LIB
${CBLAS_INSTALL_DIR}/bin/openblas${CMAKE_SHARED_LIBRARY_SUFFIX})
endif(NOT WIN32)
endif()
2 changes: 1 addition & 1 deletion cmake/external/paddle2onnx.cmake
@@ -69,7 +69,7 @@ else()
set(PADDLE2ONNX_COMPILE_LIB
"${PADDLE2ONNX_INSTALL_DIR}/lib/libpaddle2onnx.so"
CACHE FILEPATH "paddle2onnx compile library." FORCE)
endif(WIN32)
endif()

if(WIN32)
set(PADDLE2ONNX_URL
