From 6adb2866cbb4d17aff8ba4086c605d92faa02cd7 Mon Sep 17 00:00:00 2001
From: risemeup1 <515586620@qq.com>
Date: Sun, 21 May 2023 10:13:26 +0000
Subject: [PATCH] fix gcc12 error of coverage_ci

---
 paddle/phi/core/device_context.cc                         | 1 +
 paddle/phi/core/utils/array.h                             | 2 +-
 paddle/phi/kernels/funcs/detail/gru_cpu_kernel.h          | 8 ++++----
 paddle/phi/kernels/funcs/jit/more/intrinsic/layer_norm.cc | 2 +-
 4 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/paddle/phi/core/device_context.cc b/paddle/phi/core/device_context.cc
index 5c8fc75ff0ee2..14123e0e0ca81 100644
--- a/paddle/phi/core/device_context.cc
+++ b/paddle/phi/core/device_context.cc
@@ -297,6 +297,7 @@ struct DeviceContext::Impl {
 DeviceContext::DeviceContext() { impl_ = std::make_unique<Impl>(); }
 
 DeviceContext::DeviceContext(const DeviceContext& other) {
+  impl_ = std::make_unique<Impl>();
   impl_->SetHostAllocator(&other.GetHostAllocator());
   impl_->SetAllocator(&other.GetAllocator());
   impl_->SetZeroAllocator(&other.GetZeroAllocator());
diff --git a/paddle/phi/core/utils/array.h b/paddle/phi/core/utils/array.h
index b21e0bc088c49..44290b73737fb 100644
--- a/paddle/phi/core/utils/array.h
+++ b/paddle/phi/core/utils/array.h
@@ -85,7 +85,7 @@ class Array {
     return ptr + i;
   }
 
-  T data_[N];
+  T data_[N] = {};
 };
 
 template <typename T>
diff --git a/paddle/phi/kernels/funcs/detail/gru_cpu_kernel.h b/paddle/phi/kernels/funcs/detail/gru_cpu_kernel.h
index eb02200f01db0..e6d587a61e11a 100644
--- a/paddle/phi/kernels/funcs/detail/gru_cpu_kernel.h
+++ b/paddle/phi/kernels/funcs/detail/gru_cpu_kernel.h
@@ -130,7 +130,7 @@ void hl_avx_gru_forward_reset_output(OpResetOutput op_reset_output,
 #ifdef __AVX__
   __m256 r_value_update_gate, r_value_update_gate_last = _mm256_set1_ps(0.0f);
   __m256 r_value_reset_gate, r_value_reset_gate_last = _mm256_set1_ps(0.0f);
-  __m256 r_value_reset_output;
+  __m256 r_value_reset_output = _mm256_setzero_ps();
   __m256 r_prev_out = _mm256_set1_ps(0.0f),
          r_prev_out_last = _mm256_set1_ps(0.0f);
   __m256 r_reset_bias = _mm256_set1_ps(0.0f);
@@ -932,13 +932,13 @@ inline void gru_backward(const Context &context,
 
 template <class OpGruGrad, typename T, typename Context>
 inline void cpu_gru_backward(const Context &context,
-                             OpGruGrad op_gru_grad,
+                             OpGruGrad op_gru_grad UNUSED,
                              phi::funcs::GRUMetaValue<T> value,
                              phi::funcs::GRUMetaGrad<T> grad,
                              int frame_size,
                              int batch_size,
-                             ActivationType active_node,
-                             ActivationType active_gate) {
+                             ActivationType active_node UNUSED,
+                             ActivationType active_gate UNUSED) {
   for (int b = 0; b < batch_size; ++b) {
     // eigen
     gru_backward(context, value, grad, frame_size);
diff --git a/paddle/phi/kernels/funcs/jit/more/intrinsic/layer_norm.cc b/paddle/phi/kernels/funcs/jit/more/intrinsic/layer_norm.cc
index 54167261d1f31..04f2cdf2697fb 100644
--- a/paddle/phi/kernels/funcs/jit/more/intrinsic/layer_norm.cc
+++ b/paddle/phi/kernels/funcs/jit/more/intrinsic/layer_norm.cc
@@ -43,7 +43,7 @@ void LayerNorm(float* x,
   __m256 sum;
   __m256 mean_vec, var_vec;
   __m128 hi, lo;
-  __m256 tmp;
+  __m256 tmp = _mm256_setzero_ps();
   size_t offset;
   size_t j;
   __m256 reverse_num_vec =
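--
Reviewer note, not part of the applied patch: judging from the subject line,
the coverage-CI failures fixed here are GCC 12 diagnostics (notably
-Wmaybe-uninitialized, plus unused-parameter warnings) that the build treats
as errors. Below is a minimal, hypothetical C++ sketch (the function and
variable names are illustrative, not from the Paddle sources) of why GCC 12
flags AVX locals like the ones above and how zero-initialization silences it:

  #include <immintrin.h>

  // Before: GCC 12 cannot prove `acc` is written before it is read (the
  // first loop iteration reads it, and the loop may also run zero times),
  // so it reports "'acc' may be used uninitialized".
  __m256 sum_before(const float* p, int n) {
    __m256 acc;
    for (int i = 0; i + 8 <= n; i += 8)
      acc = _mm256_add_ps(acc, _mm256_loadu_ps(p + i));
    return acc;
  }

  // After: give the register a defined value up front, mirroring the
  // `= _mm256_setzero_ps()` changes in this patch.
  __m256 sum_after(const float* p, int n) {
    __m256 acc = _mm256_setzero_ps();
    for (int i = 0; i + 8 <= n; i += 8)
      acc = _mm256_add_ps(acc, _mm256_loadu_ps(p + i));
    return acc;
  }

The `T data_[N] = {};` change applies the same idea to an aggregate member
(value-initializing the array elements), and the UNUSED annotations (a Paddle
macro, presumably an __attribute__((unused))-style marker) silence the
diagnostics for parameters the eigen-based gru_backward path no longer uses.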