Skip to content

Commit

Permalink
fix dygraph auto-gen auto_parallel code when some output is null (#64813)
Browse files Browse the repository at this point in the history
  • Loading branch information
zhiqiu authored Jun 3, 2024
1 parent 36dad6a commit 66eecef
Show file tree
Hide file tree
Showing 3 changed files with 48 additions and 33 deletions.
4 changes: 2 additions & 2 deletions paddle/phi/api/generator/dist_api_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -295,7 +295,7 @@
}}
std::vector<phi::MetaTensor*> {name}_meta_ptr_vec({name}.size());
for (size_t i = 0; i < {name}_meta_vec.size(); ++i) {{
{name}_meta_ptr_vec[i] = &{name}_meta_vec[i];
{name}_meta_ptr_vec[i] = {name}[i] ? &{name}_meta_vec[i] : nullptr;
}}
"""
INFER_GLOBAL_SHAPE_TEMPLATE = """
Expand Down Expand Up @@ -400,7 +400,7 @@
std::vector<phi::MetaTensor> {name}_meta_vec = MakeMetaTensor({name});
std::vector<phi::MetaTensor*> {name}_meta_ptr_vec({name}_meta_vec.size());
for (size_t i = 0; i < {name}_meta_vec.size(); ++i) {{
{name}_meta_ptr_vec[i] = &{name}_meta_vec[i];
{name}_meta_ptr_vec[i] = {name}[i] ? &{name}_meta_vec[i] : nullptr;
}}
"""
INFER_META_TEMPLATE = """
Expand Down
76 changes: 45 additions & 31 deletions paddle/phi/api/generator/dist_bw_api_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,33 +53,41 @@
std::shared_ptr<phi::distributed::DistTensor> shared_dist_out =
CreateKernelDistOutput({}, !rank_is_in_current_mesh, spmd_info.second[0]);
phi::distributed::DistTensor* dist_out = shared_dist_out.get();
phi::DenseTensor* dense_out = dist_out->unsafe_mutable_value();
if (dense_out && !rank_is_in_current_mesh && !dist_out->defined()) {{
*dense_out = phi::DenseTensor(
phi::DenseTensor* dense_out = nullptr;
if (dist_out) {{
dense_out = dist_out->unsafe_mutable_value();
if (dense_out && !rank_is_in_current_mesh && !dist_out->defined()) {{
*dense_out = phi::DenseTensor(
std::make_shared<phi::Allocation>(nullptr, 0, phi::distributed::GetDefaultPlace()),
phi::DenseTensorMeta());
}}
}}
"""
SINGLE_OUT_CREATION_TEMPLATE = """
std::shared_ptr<phi::distributed::DistTensor> shared_dist_out =
CreateKernelDistOutput({}, !rank_is_in_current_mesh);
phi::distributed::DistTensor* dist_out = shared_dist_out.get();
phi::DenseTensor* dense_out = dist_out->unsafe_mutable_value();
if (dense_out && !rank_is_in_current_mesh && !dist_out->defined()) {{
phi::DenseTensor* dense_out = nullptr;
if (dist_out) {{
dense_out = dist_out->unsafe_mutable_value();
if (dense_out && !rank_is_in_current_mesh && !dist_out->defined()) {{
*dense_out = phi::DenseTensor(
std::make_shared<phi::Allocation>(nullptr, 0, phi::distributed::GetDefaultPlace()),
phi::DenseTensorMeta());
std::make_shared<phi::Allocation>(nullptr, 0, phi::distributed::GetDefaultPlace()),
phi::DenseTensorMeta());
}}
}}
"""
VECTOR_OUT_CREATION_TEMPLATE_WITH_NO_SPMD = """
auto dist_out = SetKernelDistOutput({name});
std::vector<phi::DenseTensor*> dense_out(dist_out.size());
std::vector<phi::DenseTensor*> dense_out(dist_out.size(), nullptr);
for (size_t i=0; i<dist_out.size(); i++) {{
dense_out[i] = dist_out[i]->unsafe_mutable_value();
if (dense_out[i] && !rank_is_in_current_mesh && !dist_out[i]->defined()) {{
*dense_out[i] = phi::DenseTensor(
std::make_shared<phi::Allocation>(nullptr, 0, phi::distributed::GetDefaultPlace()),
phi::DenseTensorMeta());
if (dist_out[i]) {{
dense_out[i] = dist_out[i]->unsafe_mutable_value();
if (dense_out[i] && !rank_is_in_current_mesh && !dist_out[i]->defined()) {{
*dense_out[i] = phi::DenseTensor(
std::make_shared<phi::Allocation>(nullptr, 0, phi::distributed::GetDefaultPlace()),
phi::DenseTensorMeta());
}}
}}
}}
"""
Expand All @@ -90,13 +98,15 @@
for(auto& e: shared_dist_out){{
dist_out.push_back(e.get());
}}
std::vector<phi::DenseTensor*> dense_out(dist_out.size());
std::vector<phi::DenseTensor*> dense_out(dist_out.size(), nullptr);
for (size_t i=0; i<dist_out.size(); i++) {{
dense_out[i] = dist_out[i]->unsafe_mutable_value();
if (dense_out[i] && !rank_is_in_current_mesh && !dist_out[i]->defined()) {{
*dense_out[i] = phi::DenseTensor(
std::make_shared<phi::Allocation>(nullptr, 0, phi::distributed::GetDefaultPlace()),
phi::DenseTensorMeta());
if (dist_out[i]) {{
dense_out[i] = dist_out[i]->unsafe_mutable_value();
if (dense_out[i] && !rank_is_in_current_mesh && !dist_out[i]->defined()) {{
*dense_out[i] = phi::DenseTensor(
std::make_shared<phi::Allocation>(nullptr, 0, phi::distributed::GetDefaultPlace()),
phi::DenseTensorMeta());
}}
}}
}}
"""
Expand All @@ -108,13 +118,15 @@
for(auto& e: shared_dist_out){{
dist_out.push_back(e.get());
}}
std::vector<phi::DenseTensor*> dense_out(dist_out.size());
std::vector<phi::DenseTensor*> dense_out(dist_out.size(), nullptr);
for (size_t i=0; i<dist_out.size(); i++) {{
dense_out[i] = dist_out[i]->unsafe_mutable_value();
if (dense_out[i] && !rank_is_in_current_mesh && !dist_out[i]->defined()) {{
*dense_out[i] = phi::DenseTensor(
std::make_shared<phi::Allocation>(nullptr, 0, phi::distributed::GetDefaultPlace()),
phi::DenseTensorMeta());
if (dist_out[i]) {{
dense_out[i] = dist_out[i]->unsafe_mutable_value();
if (dense_out[i] && !rank_is_in_current_mesh && !dist_out[i]->defined()) {{
*dense_out[i] = phi::DenseTensor(
std::make_shared<phi::Allocation>(nullptr, 0, phi::distributed::GetDefaultPlace()),
phi::DenseTensorMeta());
}}
}}
}}
"""
Expand Down Expand Up @@ -156,13 +168,15 @@
"""
MULTI_VECTOR_OUT_CREATION_TEMPLATE = """
auto dist_out_{i} = SetKernelDistOutput({name});
std::vector<phi::DenseTensor*> dense_out_{i}(dist_out_{i}.size());
std::vector<phi::DenseTensor*> dense_out_{i}(dist_out_{i}.size(), nullptr);
for (size_t i = 0; i < dist_out_{i}.size(); i++) {{
dense_out_{i}[i] = const_cast<phi::DenseTensor*>(&dist_out_{i}[i]->value());
if (dense_out_{i}[i] && !rank_is_in_current_mesh && !dist_out_{i}[i]->defined()) {{
*dense_out_{i}[i]= phi::DenseTensor(
std::make_shared<phi::Allocation>(nullptr, 0, phi::distributed::GetDefaultPlace()),
phi::DenseTensorMeta());
if (dist_out_{i}[i]) {{
dense_out_{i}[i] = const_cast<phi::DenseTensor*>(&dist_out_{i}[i]->value());
if (dense_out_{i}[i] && !rank_is_in_current_mesh && !dist_out_{i}[i]->defined()) {{
*dense_out_{i}[i]= phi::DenseTensor(
std::make_shared<phi::Allocation>(nullptr, 0, phi::distributed::GetDefaultPlace()),
phi::DenseTensorMeta());
}}
}}
}}
"""
Expand Down
1 change: 1 addition & 0 deletions paddle/phi/api/lib/api_gen_utils.cc
Original file line number Diff line number Diff line change
Expand Up @@ -736,6 +736,7 @@ std::shared_ptr<phi::distributed::DistTensor> CreateKernelDistOutput(
}
return dist_output;
}
VLOG(4) << "CreateKernelDistOutput with NULL out";
return nullptr;
}

Expand Down

0 comments on commit 66eecef

Please sign in to comment.