From 24268f3abc209c8e67474b2d258dc6058c51d22d Mon Sep 17 00:00:00 2001
From: XBWGC <67684278+XBWGC@users.noreply.github.com>
Date: Tue, 17 Aug 2021 13:40:31 +0800
Subject: [PATCH] export enable_manual_shard to python side (#67)

* add ipu_stage for pipeline

* Update popart_canonicalization_pass.cc

* del hasattr

* export enable_manual_shard to python side
---
 paddle/fluid/framework/ipu/ipu_backend.cc     | 10 +++-
 paddle/fluid/framework/ipu/ipu_strategy.h     |  2 +
 paddle/fluid/pybind/pybind.cc                 | 19 +++++++
 .../tests/unittests/ipu/ipu_conv_test.py      |  3 +-
 .../tests/unittests/ipu/ipu_strategy_test.py  | 57 +++++++++++++++++++
 5 files changed, 86 insertions(+), 5 deletions(-)
 create mode 100644 python/paddle/fluid/tests/unittests/ipu/ipu_strategy_test.py

diff --git a/paddle/fluid/framework/ipu/ipu_backend.cc b/paddle/fluid/framework/ipu/ipu_backend.cc
index 5d3593b27b3ee..e5847e144f0db 100644
--- a/paddle/fluid/framework/ipu/ipu_backend.cc
+++ b/paddle/fluid/framework/ipu/ipu_backend.cc
@@ -153,11 +153,15 @@ void IpuBackend::Prepare() {
         paddle::platform::errors::InvalidArgument(
             "loss_id = %s doesn't exist in popart graph.", optimizer_.loss_));
     session_ = popart::TrainingSession::createFromOnnxModel(
-        proto, dataFlow, it->second, *popart_optimizer, curr_device_);
+        proto, dataFlow, it->second, *popart_optimizer, curr_device_,
+        popart::InputShapeInfo(), ipu_strategy_->popart_options_,
+        popart::Patterns(popart::PatternsLevel::Default));
   } else {
     VLOG(1) << "Creating InferenceSession from Onnx Model...";
-    session_ = popart::InferenceSession::createFromOnnxModel(proto, dataFlow,
-                                                             curr_device_);
+    session_ = popart::InferenceSession::createFromOnnxModel(
+        proto, dataFlow, curr_device_, popart::InputShapeInfo(),
+        ipu_strategy_->popart_options_,
+        popart::Patterns(popart::PatternsLevel::Default));
   }
   VLOG(1) << "Creating session from Onnx Model...done";
 
diff --git a/paddle/fluid/framework/ipu/ipu_strategy.h b/paddle/fluid/framework/ipu/ipu_strategy.h
index b223c3e88feb7..157dbc7ba72e8 100644
--- a/paddle/fluid/framework/ipu/ipu_strategy.h
+++ b/paddle/fluid/framework/ipu/ipu_strategy.h
@@ -34,6 +34,8 @@ namespace framework {
 namespace ipu {
 
+using VirtualGraphMode = popart::VirtualGraphMode;
+
 struct IpuStrategy {
   int num_ipus_ = 1;
   bool is_training_ = true;
diff --git a/paddle/fluid/pybind/pybind.cc b/paddle/fluid/pybind/pybind.cc
index 88158bf32b9c7..284bb3af3a1db 100644
--- a/paddle/fluid/pybind/pybind.cc
+++ b/paddle/fluid/pybind/pybind.cc
@@ -3230,6 +3230,25 @@ All parameter, weight, gradient are variables in Paddle.
           },
           R"DOC(
             Bool type, True enable pipeline, otherwise disable. Default False.
+          )DOC")
+      .def_property(
+          "enable_manual_shard",
+          [](const ipu::IpuStrategy &self) {
+            return self.popart_options_.virtualGraphMode ==
+                   ipu::VirtualGraphMode::Manual;
+          },
+          [](ipu::IpuStrategy &self, bool enable_ipu_shard) {
+            if (enable_ipu_shard) {
+              self.popart_options_.virtualGraphMode =
+                  ipu::VirtualGraphMode::Manual;
+            } else {
+              self.popart_options_.virtualGraphMode =
+                  ipu::VirtualGraphMode::Off;
+            }
+          },
+          R"DOC(
+            Bool type, True enable model sharding, otherwise disable. Default
+            False.
)DOC"); #endif diff --git a/python/paddle/fluid/tests/unittests/ipu/ipu_conv_test.py b/python/paddle/fluid/tests/unittests/ipu/ipu_conv_test.py index ec8966fdd2188..a117e25c0c904 100644 --- a/python/paddle/fluid/tests/unittests/ipu/ipu_conv_test.py +++ b/python/paddle/fluid/tests/unittests/ipu/ipu_conv_test.py @@ -42,8 +42,7 @@ def _test(self, run_ipu=True): name='image', shape=[1, 3, 10, 10], dtype='float32') conv1 = paddle.static.nn.conv2d( image, num_filters=3, filter_size=3, bias_attr=False) - conv2 = conv1 + conv1 - loss = paddle.mean(conv2) + loss = paddle.mean(conv1) adam = paddle.optimizer.Adam(learning_rate=1e-2) adam.minimize(loss) diff --git a/python/paddle/fluid/tests/unittests/ipu/ipu_strategy_test.py b/python/paddle/fluid/tests/unittests/ipu/ipu_strategy_test.py new file mode 100644 index 0000000000000..741ca8784bb60 --- /dev/null +++ b/python/paddle/fluid/tests/unittests/ipu/ipu_strategy_test.py @@ -0,0 +1,57 @@ +# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import numpy as np +import unittest +import sys +import paddle +import paddle.fluid as fluid +import paddle.fluid.compiler as compiler + +paddle.enable_static() +SEED = 2021 + + +@unittest.skipIf(not paddle.is_compiled_with_ipu(), + "core is not compiled with IPU") +class TestConvNet(unittest.TestCase): + def test_training(self): + ipu_strategy = compiler.get_ipu_strategy() + + assert ipu_strategy.num_ipus == 1, "Default num_ipus must be 1" + assert ipu_strategy.is_training == True, "Default is_training is True" + assert ipu_strategy.enable_pipelining == False, \ + "Default enable_pipelining is False" + assert ipu_strategy.enable_manual_shard == False, \ + "Default enable_manual_shard is False" + + ipu_strategy.num_ipus = 2 + assert ipu_strategy.num_ipus == 2, "Set num_ipus Failed" + + ipu_strategy.is_training = False + assert ipu_strategy.is_training == False, "Set is_training Failed" + + ipu_strategy.enable_pipelining = True + assert ipu_strategy.enable_pipelining == True, \ + "Set enable_pipelining Failed" + + ipu_strategy.enable_manual_shard = True + assert ipu_strategy.enable_manual_shard == True, \ + "Set enable_manual_shard Failed" + + +if __name__ == "__main__": + unittest.main()