From 67a476ac7a50983ce7569801630c0e13a120b628 Mon Sep 17 00:00:00 2001
From: niuliling123
Date: Wed, 26 Jul 2023 05:21:33 +0000
Subject: [PATCH 1/4] Set bf16 list

---
 python/paddle/amp/amp_lists.py             |  2 +-
 python/paddle/static/amp/bf16/amp_lists.py | 28 +++-------------------
 2 files changed, 4 insertions(+), 26 deletions(-)

diff --git a/python/paddle/amp/amp_lists.py b/python/paddle/amp/amp_lists.py
index 3aefd298340a5..277e7faf6f8f1 100644
--- a/python/paddle/amp/amp_lists.py
+++ b/python/paddle/amp/amp_lists.py
@@ -90,7 +90,7 @@
     'scatter',
 }
 
-BF16_WHITE_LIST = {'conv2d', 'einsum', 'matmul_v2'}
+BF16_WHITE_LIST = {'conv2d', 'matmul', 'matmul_v2', 'mul'}
 BF16_BLACK_LIST = set()
 
 
diff --git a/python/paddle/static/amp/bf16/amp_lists.py b/python/paddle/static/amp/bf16/amp_lists.py
index 5ea5beb708b89..cd4d6bdb329b4 100644
--- a/python/paddle/static/amp/bf16/amp_lists.py
+++ b/python/paddle/static/amp/bf16/amp_lists.py
@@ -14,6 +14,7 @@
 
 import copy
 
+from paddle.amp.amp_lists import BF16_WHITE_LIST
 from paddle.fluid import core
 
 from ..fp16_lists import black_list as black_list_fp16
@@ -86,33 +87,10 @@ def _update_list(self):
 bf16_initializer_list = {'fill_constant', 'uniform_random'}
 
 # always bf16
-bf16_list = {
-    'conv2d',
-    'matmul',
-    'matmul_v2',
-    'mul',
-}
+bf16_list = BF16_WHITE_LIST
 
 # depends on the prev_op type
-gray_list = {
-    'elementwise_add',
-    'elementwise_sub',
-    'elementwise_mul',
-    'elementwise_div',
-    'relu',
-    'layer_norm',
-    'slice',
-    'concat',
-    'uniform_random',
-    'reshape2',
-    'transpose2',
-    'pool2d',
-    'sigmoid',
-    'cast',
-    'scale',
-    'fill_constant',
-    'split',
-}
+gray_list = gray_list_fp16
 
 _, _, _sys_unsupported_bf16_list = core.op_supported_infos(
     'CPU', core.VarDesc.VarType.BF16

From 153fb2bed1ea93b79dd389accb148f72cf2c9c88 Mon Sep 17 00:00:00 2001
From: niuliling123
Date: Mon, 21 Aug 2023 02:58:00 +0000
Subject: [PATCH 2/4] update

---
 python/paddle/amp/amp_lists.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/paddle/amp/amp_lists.py b/python/paddle/amp/amp_lists.py
index 277e7faf6f8f1..da4314470f4c8 100644
--- a/python/paddle/amp/amp_lists.py
+++ b/python/paddle/amp/amp_lists.py
@@ -91,7 +91,7 @@
 }
 
 BF16_WHITE_LIST = {'conv2d', 'matmul', 'matmul_v2', 'mul'}
-BF16_BLACK_LIST = set()
+BF16_BLACK_LIST = FP16_BLACK_LIST
 
 
 # At OD level, ops in WHITE_LIST will use FP16/BF16 and the others will use FP32.

From 3b7984dc36081e17e71ebb974861f9b910a64368 Mon Sep 17 00:00:00 2001
From: niuliling123
Date: Mon, 21 Aug 2023 17:27:59 +0800
Subject: [PATCH 3/4] merge bf16_gray to fp16_gray_list

---
 test/contrib/test_bf16_utils.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/test/contrib/test_bf16_utils.py b/test/contrib/test_bf16_utils.py
index c44e5a4a97481..75ce0045b39ab 100644
--- a/test/contrib/test_bf16_utils.py
+++ b/test/contrib/test_bf16_utils.py
@@ -46,10 +46,10 @@ def test_amp_lists_1(self):
 
     def test_amp_lists_2(self):
-        # 2. w={'tanh'}, b=None
-        self.fp32_list.remove('tanh')
-        self.bf16_list.add('tanh')
+        # 2. w={'tan'}, b=None
+        self.fp32_list.remove('tan')
+        self.bf16_list.add('tan')
 
-        self.amp_lists_ = amp.bf16.AutoMixedPrecisionListsBF16({'tanh'})
+        self.amp_lists_ = amp.bf16.AutoMixedPrecisionListsBF16({'tan'})
 
     def test_amp_lists_3(self):
         # 3. w={'lstm'}, b=None

From dcd5f734756afd4d09bafcb97e1863012b586b63 Mon Sep 17 00:00:00 2001
From: niuliling123
Date: Wed, 23 Aug 2023 14:44:56 +0800
Subject: [PATCH 4/4] update amp_list

---
 python/paddle/amp/amp_lists.py | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

diff --git a/python/paddle/amp/amp_lists.py b/python/paddle/amp/amp_lists.py
index da4314470f4c8..7d014b1bf14f9 100644
--- a/python/paddle/amp/amp_lists.py
+++ b/python/paddle/amp/amp_lists.py
@@ -12,22 +12,28 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# The set of ops that support fp16 calculation and are considered numerically-
-# safe and performance-critical. These ops are always converted to fp16.
-FP16_WHITE_LIST = {
+# The set of ops that support fp16 and bf16 calculation and are considered numerically-
+# safe and performance-critical. These ops are always converted to fp16 or bf16.
+WHITE_LIST = {
     'conv2d',
     'einsum',
     'matmul',
     'matmul_v2',
     'max_pool2d_with_index',
     'mul',
+    'fused_gemm_epilogue',
+}
+
+# The set of ops that support fp16 but do not support bf16.
+ONLY_FP16_WHITE_LIST = {
     'fake_quantize_dequantize_abs_max',
     'fake_quantize_dequantize_moving_average_abs_max',
-    'fused_gemm_epilogue',
     'fused_attention',
     'fused_feedforward',
 }
 
+FP16_WHITE_LIST = WHITE_LIST | ONLY_FP16_WHITE_LIST
+
 # The set of ops that support fp16 calculation and are considered numerically-
 # dangerous and whose effects may also be observed in downstream ops.
 FP16_BLACK_LIST = {
@@ -90,7 +96,7 @@
     'scatter',
 }
 
-BF16_WHITE_LIST = {'conv2d', 'matmul', 'matmul_v2', 'mul'}
+BF16_WHITE_LIST = WHITE_LIST
 BF16_BLACK_LIST = FP16_BLACK_LIST
 
 
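
A note on the final composition: after PATCH 4/4 the three public lists are related by plain Python set operations. FP16_WHITE_LIST is the union WHITE_LIST | ONLY_FP16_WHITE_LIST, while BF16_WHITE_LIST and BF16_BLACK_LIST are bare aliases of WHITE_LIST and FP16_BLACK_LIST respectively. The sketch below is a minimal, Paddle-free illustration of the set semantics this relies on; the variable custom_bf16 is illustrative and not part of the patch.

    import copy

    # Ops treated as numerically safe in both fp16 and bf16 (PATCH 4/4).
    WHITE_LIST = {
        'conv2d', 'einsum', 'matmul', 'matmul_v2',
        'max_pool2d_with_index', 'mul', 'fused_gemm_epilogue',
    }

    # Ops that support fp16 but not bf16 (PATCH 4/4).
    ONLY_FP16_WHITE_LIST = {
        'fake_quantize_dequantize_abs_max',
        'fake_quantize_dequantize_moving_average_abs_max',
        'fused_attention',
        'fused_feedforward',
    }

    # `|` builds a brand-new set, so later edits to WHITE_LIST do not
    # retroactively change FP16_WHITE_LIST.
    FP16_WHITE_LIST = WHITE_LIST | ONLY_FP16_WHITE_LIST
    assert FP16_WHITE_LIST is not WHITE_LIST

    # Bare assignment aliases: both names refer to one set object, so a
    # mutation through either name is visible through the other.
    BF16_WHITE_LIST = WHITE_LIST
    assert BF16_WHITE_LIST is WHITE_LIST

    # Callers that customize a list (as test_amp_lists_2 does with
    # remove()/add()) should therefore mutate a copy, not the shared set.
    custom_bf16 = copy.copy(BF16_WHITE_LIST)  # illustrative name
    custom_bf16.add('tan')
    assert 'tan' in custom_bf16 and 'tan' not in WHITE_LIST

The same aliasing applies to BF16_BLACK_LIST = FP16_BLACK_LIST in PATCH 2/4: should the two black lists ever need to diverge, one of them will have to become an explicit copy or union rather than an alias.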