Zoltan: Remove use of hardcoded MPI_COMM_WORLD
tlamonthezie authored and thearusable committed Sep 1, 2023
1 parent 004eee5 commit 9f760ee
Showing 59 changed files with 401 additions and 322 deletions.
2 changes: 1 addition & 1 deletion packages/zoltan/README.developer
@@ -69,7 +69,7 @@ printf("GID: " ZOLTAN_ID_SPEC ", LID %d\n", my_gid, my_lid);

To send a ZOLTAN_ID_TYPE in an MPI message, use ZOLTAN_ID_MPI_TYPE:

MPI_Bcast(&gid, 1, ZOLTAN_ID_MPI_TYPE, 0, MPI_COMM_WORLD);
MPI_Bcast(&gid, 1, ZOLTAN_ID_MPI_TYPE, 0, zoltan_get_global_comm());

To silence compiler warnings, you can properly specify a constant of type ZOLTAN_ID_TYPE using ZOLTAN_ID_CONSTANT:

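The ZOLTAN_ID_CONSTANT example itself falls outside this hunk. As an illustration only (not taken from the README), and assuming ZOLTAN_ID_CONSTANT wraps a numeric literal so it receives the suffix matching the configured ID width, such a constant might be written as:

ZOLTAN_ID_TYPE first_gid = ZOLTAN_ID_CONSTANT(0);   /* hypothetical usage; suffix supplied by the macro */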
1 change: 1 addition & 0 deletions packages/zoltan/src/CMakeLists.txt
@@ -482,6 +482,7 @@ APPEND_SET(SOURCES
Utilities/Communication/comm_do_reverse.c
Utilities/Communication/comm_info.c
Utilities/Communication/comm_create.c
Utilities/Communication/comm_default.c
Utilities/Communication/comm_resize.c
Utilities/Communication/comm_sort_ints.c
Utilities/Communication/comm_destroy.c
75 changes: 75 additions & 0 deletions packages/zoltan/src/Utilities/Communication/comm_default.c
@@ -0,0 +1,75 @@
/*
* @HEADER
*
* ***********************************************************************
*
* Zoltan Toolkit for Load-balancing, Partitioning, Ordering and Coloring
* Copyright 2012 Sandia Corporation
*
* Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
* the U.S. Government retains certain rights in this software.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the Corporation nor the names of the
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Questions? Contact Karen Devine [email protected]
* Erik Boman [email protected]
*
* ***********************************************************************
*
* @HEADER
*/

#include "comm.h"
#include <pthread.h>

#ifdef __cplusplus
/* if C++, define the rest of this header file as extern C */
extern "C" {
#endif

static pthread_mutex_t zoltan_global_mpi_lock;
static MPI_Comm Zoltan_Global_MPI_Comm = MPI_COMM_WORLD; // CHECK: ALLOW MPI_COMM_WORLD

/* Function to set the default communicator */
inline void zoltan_initialize_global_comm(MPI_Comm comm) {
pthread_mutex_lock(&zoltan_global_mpi_lock);
Zoltan_Global_MPI_Comm = comm;
pthread_mutex_unlock(&zoltan_global_mpi_lock);
}

/* Function to get the default communicator */
inline MPI_Comm zoltan_get_global_comm() {
pthread_mutex_lock(&zoltan_global_mpi_lock);
MPI_Comm comm = Zoltan_Global_MPI_Comm;
pthread_mutex_unlock(&zoltan_global_mpi_lock);
return comm;
}

#ifdef __cplusplus
} /* closing bracket for extern "C" */
#endif
2 changes: 1 addition & 1 deletion packages/zoltan/src/Utilities/Communication/comm_do.c
@@ -145,7 +145,7 @@ char *recv_data) /* array of data I'll own after comm */

/* Check input parameters */
if (!plan) {
MPI_Comm_rank(MPI_COMM_WORLD, &my_proc);
MPI_Comm_rank(zoltan_get_global_comm(), &my_proc);
ZOLTAN_COMM_ERROR("Communication plan = NULL", yo, my_proc);
return ZOLTAN_FATAL;
}
2 changes: 1 addition & 1 deletion packages/zoltan/src/Utilities/Communication/comm_info.c
@@ -90,7 +90,7 @@ int i, j, k, my_proc;

/* Check input parameters */
if (!plan) {
MPI_Comm_rank(MPI_COMM_WORLD, &my_proc);
MPI_Comm_rank(zoltan_get_global_comm(), &my_proc);
ZOLTAN_COMM_ERROR("Communication plan = NULL", yo, my_proc);
return ZOLTAN_FATAL;
}
4 changes: 2 additions & 2 deletions packages/zoltan/src/Utilities/Memory/mem.c
@@ -79,8 +79,8 @@ static int nfree = 0; /* number of calls to free */
#ifdef ZOLTAN_NO_MPI
#define GET_RANK(a) *(a)=0
#else
#include <mpi.h>
#define GET_RANK(a) MPI_Comm_rank(MPI_COMM_WORLD, (a))
#include <zoltan_comm.h>
#define GET_RANK(a) MPI_Comm_rank(zoltan_get_global_comm(), (a))
#endif

#define MAX_STRING_LEN 50
3 changes: 2 additions & 1 deletion packages/zoltan/src/Utilities/Timer/zoltan_timer.c
@@ -49,6 +49,7 @@
#include "zoltan_types.h"
#include "zoltan_util.h"
#include "zoltan_mem.h"
#include "zoltan_comm.h"

#ifdef VAMPIR
#include <VT.h>
@@ -88,7 +89,7 @@ extern "C" {
#define FATALERROR(yo, str) \
{ \
int ppproc; \
MPI_Comm_rank(MPI_COMM_WORLD, &ppproc); \
MPI_Comm_rank(zoltan_get_global_comm(), &ppproc); \
ZOLTAN_PRINT_ERROR(ppproc, yo, str); \
return ZOLTAN_FATAL; \
}
2 changes: 1 addition & 1 deletion packages/zoltan/src/ch/ch_init_dist.c
@@ -133,7 +133,7 @@ int max_assignment, have_assignments;
/* Broadcast initial assignments if they exist.
* Assignments can be used for partitions and/or processors.
*/
MPI_Comm_rank(MPI_COMM_WORLD, &proc);
MPI_Comm_rank(zoltan_get_global_comm(), &proc);

/* First, tell other processors whether the assignments array is NULL. */
if (proc == host_proc)
10 changes: 5 additions & 5 deletions packages/zoltan/src/driver/dr_chaco_io.c
@@ -118,7 +118,7 @@ int read_chaco_file(int Proc,
file_error = (fp == NULL);
}

MPI_Bcast(&file_error, 1, MPI_INT, 0, MPI_COMM_WORLD);
MPI_Bcast(&file_error, 1, MPI_INT, 0, zoltan_get_global_comm());

if (file_error) {
sprintf(cmesg, "fatal: Could not open Chaco graph file %s",
@@ -236,14 +236,14 @@ for (i=0; i<nvtxs; i++) { /* move 2/3 of points much closer to "a" */

/* Distribute graph */

if (!chaco_dist_graph(MPI_COMM_WORLD, pio_info, 0, &gnvtxs, &nvtxs,
if (!chaco_dist_graph(zoltan_get_global_comm(), pio_info, 0, &gnvtxs, &nvtxs,
&start, &adj, &vwgt_dim, &vwgts, &ewgt_dim, &ewgts,
&ndim, &x, &y, &z, &assignments)) {
Gen_Error(0, "fatal: Error returned from chaco_dist_graph");
return 0;
}

MPI_Bcast(&base, 1, MPI_INT, 0, MPI_COMM_WORLD);
MPI_Bcast(&base, 1, MPI_INT, 0, zoltan_get_global_comm());

if (!chaco_setup_mesh_struct(Proc, Num_Proc, prob, mesh, pio_info, gnvtxs, nvtxs,
start, adj, vwgt_dim, vwgts, ewgt_dim, ewgts,
@@ -341,7 +341,7 @@ int i;
* Each element has one set of coordinates (i.e., node) if a coords file
* was provided; zero otherwise.
*/
MPI_Bcast( &no_geom, 1, MPI_INT, 0, MPI_COMM_WORLD);
MPI_Bcast( &no_geom, 1, MPI_INT, 0, zoltan_get_global_comm());
if (no_geom)
mesh->eb_nnodes[0] = 0;
else
@@ -551,7 +551,7 @@ void chaco_init_local_ids(
int i;
int Proc;

MPI_Comm_rank(MPI_COMM_WORLD, &Proc);
MPI_Comm_rank(zoltan_get_global_comm(), &Proc);

*num_vtx = ch_dist_max_num_vtx(assignments);
*vtx_list = (int *) malloc(((int)*num_vtx) * sizeof(int));
8 changes: 4 additions & 4 deletions packages/zoltan/src/driver/dr_chaco_io.c.shockstem
@@ -118,7 +118,7 @@ int read_chaco_file(int Proc,
file_error = (fp == NULL);
}

MPI_Bcast(&file_error, 1, MPI_INT, 0, MPI_COMM_WORLD);
MPI_Bcast(&file_error, 1, MPI_INT, 0, zoltan_get_global_comm());

if (file_error) {
sprintf(cmesg, "fatal: Could not open Chaco graph file %s",
@@ -218,7 +218,7 @@ printf("%d KDDKDD NEARESTSFILE %s\n", Proc, chaco_fname); fflush(stdout);
}

/* Distribute graph */
if (!chaco_dist_graph(MPI_COMM_WORLD, pio_info, 0, &gnvtxs, &nvtxs,
if (!chaco_dist_graph(zoltan_get_global_comm(), pio_info, 0, &gnvtxs, &nvtxs,
&start, &adj, &vwgt_dim, &vwgts, &ewgt_dim, &ewgts,
&ndim, &x, &y, &z, &assignments) != 0) {
Gen_Error(0, "fatal: Error returned from chaco_dist_graph");
@@ -315,7 +315,7 @@ int i;
* Each element has one set of coordinates (i.e., node) if a coords file
* was provided; zero otherwise.
*/
MPI_Bcast( &no_geom, 1, MPI_INT, 0, MPI_COMM_WORLD);
MPI_Bcast( &no_geom, 1, MPI_INT, 0, zoltan_get_global_comm());
if (no_geom)
mesh->eb_nnodes[0] = 0;
else
@@ -516,7 +516,7 @@ void chaco_init_local_ids(
int i;
int Proc;

MPI_Comm_rank(MPI_COMM_WORLD, &Proc);
MPI_Comm_rank(zoltan_get_global_comm(), &Proc);

*num_vtx = ch_dist_max_num_vtx(assignments);
*vtx_list = (int *) malloc(*num_vtx * sizeof(int));
4 changes: 2 additions & 2 deletions packages/zoltan/src/driver/dr_dd.c
@@ -65,8 +65,8 @@ int build_elem_dd(MESH_INFO_PTR mesh)
int maxelems;

MPI_Allreduce(&(mesh->num_elems), &maxelems, 1, MPI_INT, MPI_MAX,
MPI_COMM_WORLD);
if (Zoltan_DD_Create(&(mesh->dd), MPI_COMM_WORLD, 1, 0, 0, maxelems, 0) != 0){
zoltan_get_global_comm());
if (Zoltan_DD_Create(&(mesh->dd), zoltan_get_global_comm(), 1, 0, 0, maxelems, 0) != 0){
Gen_Error(0, "fatal: NULL returned from Zoltan_DD_Create()\n");
return 0;
}
2 changes: 1 addition & 1 deletion packages/zoltan/src/driver/dr_ddCPP.cpp
@@ -58,7 +58,7 @@ int build_elem_dd(MESH_INFO_PTR mesh)
{
destroy_elem_dd();

dd = new Zoltan_DD(MPI_COMM_WORLD, 1, 0, 0, 0, 0);
dd = new Zoltan_DD(zoltan_get_global_comm(), 1, 0, 0, 0, 0);

return update_elem_dd(mesh);
}
2 changes: 1 addition & 1 deletion packages/zoltan/src/driver/dr_err.c
@@ -171,7 +171,7 @@ void error_report(int Proc)
}
}

MPI_Abort(MPI_COMM_WORLD, -1);
MPI_Abort(zoltan_get_global_comm(), -1);
}

#ifdef __cplusplus
20 changes: 10 additions & 10 deletions packages/zoltan/src/driver/dr_eval.c
@@ -78,7 +78,7 @@ ZOLTAN_ID_TYPE gsumcuts, gmaxcuts, gmincuts, elemcount;
ZOLTAN_ID_TYPE gsumelems, gmaxelems, gminelems;
double gsumload, gmaxload, gminload;

MPI_Comm_rank(MPI_COMM_WORLD, &proc);
MPI_Comm_rank(zoltan_get_global_comm(), &proc);

for (i = 0; i < mesh->necmap; i++) {
cuts += mesh->ecmap_cnt[i];
@@ -89,19 +89,19 @@ double gsumload, gmaxload, gminload;
load += mesh->elements[i].cpu_wgt[0];
}

MPI_Allreduce(&cuts, &gsumcuts, 1, ZOLTAN_ID_MPI_TYPE, MPI_SUM, MPI_COMM_WORLD);
MPI_Allreduce(&cuts, &gmaxcuts, 1, ZOLTAN_ID_MPI_TYPE, MPI_MAX, MPI_COMM_WORLD);
MPI_Allreduce(&cuts, &gmincuts, 1, ZOLTAN_ID_MPI_TYPE, MPI_MIN, MPI_COMM_WORLD);
MPI_Allreduce(&cuts, &gsumcuts, 1, ZOLTAN_ID_MPI_TYPE, MPI_SUM, zoltan_get_global_comm());
MPI_Allreduce(&cuts, &gmaxcuts, 1, ZOLTAN_ID_MPI_TYPE, MPI_MAX, zoltan_get_global_comm());
MPI_Allreduce(&cuts, &gmincuts, 1, ZOLTAN_ID_MPI_TYPE, MPI_MIN, zoltan_get_global_comm());

elemcount = mesh->num_elems - mesh->blank_count;

MPI_Allreduce(&elemcount, &gsumelems, 1, ZOLTAN_ID_MPI_TYPE, MPI_SUM, MPI_COMM_WORLD);
MPI_Allreduce(&elemcount, &gmaxelems, 1, ZOLTAN_ID_MPI_TYPE, MPI_MAX, MPI_COMM_WORLD);
MPI_Allreduce(&elemcount, &gminelems, 1, ZOLTAN_ID_MPI_TYPE, MPI_MIN, MPI_COMM_WORLD);
MPI_Allreduce(&elemcount, &gsumelems, 1, ZOLTAN_ID_MPI_TYPE, MPI_SUM, zoltan_get_global_comm());
MPI_Allreduce(&elemcount, &gmaxelems, 1, ZOLTAN_ID_MPI_TYPE, MPI_MAX, zoltan_get_global_comm());
MPI_Allreduce(&elemcount, &gminelems, 1, ZOLTAN_ID_MPI_TYPE, MPI_MIN, zoltan_get_global_comm());

MPI_Allreduce(&load, &gsumload, 1, MPI_DOUBLE, MPI_SUM, MPI_COMM_WORLD);
MPI_Allreduce(&load, &gmaxload, 1, MPI_DOUBLE, MPI_MAX, MPI_COMM_WORLD);
MPI_Allreduce(&load, &gminload, 1, MPI_DOUBLE, MPI_MIN, MPI_COMM_WORLD);
MPI_Allreduce(&load, &gsumload, 1, MPI_DOUBLE, MPI_SUM, zoltan_get_global_comm());
MPI_Allreduce(&load, &gmaxload, 1, MPI_DOUBLE, MPI_MAX, zoltan_get_global_comm());
MPI_Allreduce(&load, &gminload, 1, MPI_DOUBLE, MPI_MIN, zoltan_get_global_comm());

if (proc == 0) {
printf("DRIVER EVAL: load: max %f min %f sum %f\n",
8 changes: 4 additions & 4 deletions packages/zoltan/src/driver/dr_exoII_io.c
@@ -230,9 +230,9 @@ int read_exoII_file(int Proc,

/* Perform reduction on necessary fields of element blocks. kdd 2/2001 */
MPI_Allreduce(nnodes, mesh->eb_nnodes, mesh->num_el_blks, MPI_INT, MPI_MAX,
MPI_COMM_WORLD);
zoltan_get_global_comm());
MPI_Allreduce(etypes, mesh->eb_etypes, mesh->num_el_blks, MPI_INT, MPI_MIN,
MPI_COMM_WORLD);
zoltan_get_global_comm());
for (i = 0; i < mesh->num_el_blks; i++) {
strcpy(mesh->eb_names[i], get_elem_name(mesh->eb_etypes[i]));
}
@@ -893,7 +893,7 @@ static int read_comm_map_info(int pexoid, int Proc, PROB_INFO_PTR prob,
* for the adjacent elements in this communication map.
*/

ierr = Zoltan_Comm_Create(&comm_obj, max_len, proc_ids, MPI_COMM_WORLD,
ierr = Zoltan_Comm_Create(&comm_obj, max_len, proc_ids, zoltan_get_global_comm(),
msg, &nrecv);
if (ierr != ZOLTAN_OK) {
Gen_Error(0, "fatal: Error returned from Zoltan_Comm_Create");
@@ -1016,7 +1016,7 @@ char cmesg[256];
char *str = "Proc";

/* generate the parallel filename for this processor */
MPI_Comm_size(MPI_COMM_WORLD, &Num_Proc);
MPI_Comm_size(zoltan_get_global_comm(), &Num_Proc);
gen_par_filename(pio_info->pexo_fname, tmp_nem_fname, pio_info, Proc,
Num_Proc);
/*
8 changes: 4 additions & 4 deletions packages/zoltan/src/driver/dr_exoII_ioCPP.cpp
@@ -226,9 +226,9 @@ int read_exoII_file(int Proc,

/* Perform reduction on necessary fields of element blocks. kdd 2/2001 */
MPI_Allreduce(nnodes, mesh->eb_nnodes, mesh->num_el_blks,
MPI_INT, MPI_MAX, MPI_COMM_WORLD) ;
MPI_INT, MPI_MAX, zoltan_get_global_comm()) ;
MPI_Allreduce(etypes, mesh->eb_etypes, mesh->num_el_blks,
MPI_INT, MPI_MIN, MPI_COMM_WORLD);
MPI_INT, MPI_MIN, zoltan_get_global_comm());
for (i = 0; i < mesh->num_el_blks; i++) {
strcpy(mesh->eb_names[i], get_elem_name(mesh->eb_etypes[i]));
}
@@ -888,7 +888,7 @@ static int read_comm_map_info(int pexoid, int Proc, PROB_INFO_PTR prob,
* for the adjacent elements in this communication map.
*/

comm_obj = new Zoltan_Comm(max_len, proc_ids, MPI_COMM_WORLD, msg, &nrecv);
comm_obj = new Zoltan_Comm(max_len, proc_ids, zoltan_get_global_comm(), msg, &nrecv);

if (nrecv != max_len) {
Gen_Error(0, "fatal: Error returned from Zoltan_Comm constructor");
@@ -990,7 +990,7 @@ char cmesg[256];

/* generate the parallel filename for this processor */
int Num_Proc = 0;
MPI_Comm_size(MPI_COMM_WORLD, &Num_Proc);
MPI_Comm_size(zoltan_get_global_comm(), &Num_Proc);

gen_par_filename(pio_info->pexo_fname, tmp_nem_fname, pio_info, Proc,
Num_Proc);
10 changes: 5 additions & 5 deletions packages/zoltan/src/driver/dr_gnuplot.c
@@ -190,7 +190,7 @@ int output_gnu(const char *cmd_file,
/* Sort by part numbers. Assumes # parts >= # proc. */
if (nelems > 0)
Zoltan_quicksort_pointer_inc_int_int(index, parts, NULL, 0, nelems-1);
MPI_Allreduce(&max_part, &gmax_part, 1, MPI_INT, MPI_MAX, MPI_COMM_WORLD);
MPI_Allreduce(&max_part, &gmax_part, 1, MPI_INT, MPI_MAX, zoltan_get_global_comm());
gnum_part = gmax_part + 1;
}

@@ -267,10 +267,10 @@
}
}

MPI_Reduce(&locMinX,&globMinX,1,MPI_FLOAT,MPI_MIN,0,MPI_COMM_WORLD);
MPI_Reduce(&locMinY,&globMinY,1,MPI_FLOAT,MPI_MIN,0,MPI_COMM_WORLD);
MPI_Reduce(&locMaxX,&globMaxX,1,MPI_FLOAT,MPI_MAX,0,MPI_COMM_WORLD);
MPI_Reduce(&locMaxY,&globMaxY,1,MPI_FLOAT,MPI_MAX,0,MPI_COMM_WORLD);
MPI_Reduce(&locMinX,&globMinX,1,MPI_FLOAT,MPI_MIN,0,zoltan_get_global_comm());
MPI_Reduce(&locMinY,&globMinY,1,MPI_FLOAT,MPI_MIN,0,zoltan_get_global_comm());
MPI_Reduce(&locMaxX,&globMaxX,1,MPI_FLOAT,MPI_MAX,0,zoltan_get_global_comm());
MPI_Reduce(&locMaxY,&globMaxY,1,MPI_FLOAT,MPI_MAX,0,zoltan_get_global_comm());

}
else if (pio_info->file_type == NEMESIS_FILE) { /* Nemesis input file */
(The remaining changed files are not shown.)