Replace LIBMESH_CHKERR with LibmeshPetscCallA (#28929)
nmnobre committed Oct 29, 2024
1 parent 45cd50c commit 922ac4d
Showing 8 changed files with 250 additions and 269 deletions.
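The change is mechanical throughout: instead of storing each PETSc return code and passing it to LIBMESH_CHKERR, every PETSc call is wrapped in LibmeshPetscCallA together with the MPI communicator on which errors should be reported. A minimal sketch of the before/after idiom, mirroring the MooseVecView change in framework/src/utils/PetscSupport.C below, and assuming the usual libMesh/PETSc headers and a PetscVector<Number> named petsc_vec are in scope:

    // Old pattern (removed by this commit): capture the PETSc return code, then check it.
    auto ierr = VecView(petsc_vec.vec(), 0);
    LIBMESH_CHKERR(ierr);

    // New pattern: hand the communicator and the PETSc call to the macro, which checks the
    // return code for us. The abort-on-error ("A") variant suits call sites like these,
    // which do not themselves return a PetscErrorCode that could propagate the failure.
    LibmeshPetscCallA(petsc_vec.comm().get(), VecView(petsc_vec.vec(), 0));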
3 changes: 1 addition & 2 deletions framework/src/base/MooseInit.C
@@ -42,8 +42,7 @@ RegisterSigHandler()
 MooseInit::MooseInit(int argc, char * argv[], MPI_Comm COMM_WORLD_IN)
   : LibMeshInit(argc, argv, COMM_WORLD_IN)
 {
-  auto ierr = PetscPopSignalHandler(); // get rid of PETSc error handler
-  LIBMESH_CHKERR(ierr);
+  LibmeshPetscCallA(COMM_WORLD_IN, PetscPopSignalHandler()); // get rid of PETSc error handler

   // Set the number of OpenMP threads to the same as the number of threads libMesh is going to use
 #ifdef LIBMESH_HAVE_OPENMP
225 changes: 104 additions & 121 deletions framework/src/preconditioners/VariableCondensationPreconditioner.C

Large diffs are not rendered by default.

8 changes: 3 additions & 5 deletions framework/src/systems/NonlinearEigenSystem.C
@@ -83,11 +83,9 @@ assemble_matrix(EquationSystems & es, const std::string & system_name)
                               eigen_nl.eigenMatrixTag());
 #if LIBMESH_HAVE_SLEPC
     if (p->negativeSignEigenKernel())
-    {
-      auto ierr =
-          MatScale(static_cast<PetscMatrix<Number> &>(eigen_system.get_matrix_B()).mat(), -1.0);
-      LIBMESH_CHKERR(ierr);
-    }
+      LibmeshPetscCallA(
+          p->comm().get(),
+          MatScale(static_cast<PetscMatrix<Number> &>(eigen_system.get_matrix_B()).mat(), -1.0));
 #endif
     return;
   }
12 changes: 4 additions & 8 deletions framework/src/utils/PetscSupport.C
@@ -62,34 +62,30 @@ void
 MooseVecView(NumericVector<Number> & vector)
 {
   PetscVector<Number> & petsc_vec = static_cast<PetscVector<Number> &>(vector);
-  auto ierr = VecView(petsc_vec.vec(), 0);
-  LIBMESH_CHKERR(ierr);
+  LibmeshPetscCallA(vector.comm().get(), VecView(petsc_vec.vec(), 0));
 }

 void
 MooseMatView(SparseMatrix<Number> & mat)
 {
   PetscMatrix<Number> & petsc_mat = static_cast<PetscMatrix<Number> &>(mat);
-  auto ierr = MatView(petsc_mat.mat(), 0);
-  LIBMESH_CHKERR(ierr);
+  LibmeshPetscCallA(mat.comm().get(), MatView(petsc_mat.mat(), 0));
 }

 void
 MooseVecView(const NumericVector<Number> & vector)
 {
   PetscVector<Number> & petsc_vec =
       static_cast<PetscVector<Number> &>(const_cast<NumericVector<Number> &>(vector));
-  auto ierr = VecView(petsc_vec.vec(), 0);
-  LIBMESH_CHKERR(ierr);
+  LibmeshPetscCallA(vector.comm().get(), VecView(petsc_vec.vec(), 0));
 }

 void
 MooseMatView(const SparseMatrix<Number> & mat)
 {
   PetscMatrix<Number> & petsc_mat =
       static_cast<PetscMatrix<Number> &>(const_cast<SparseMatrix<Number> &>(mat));
-  auto ierr = MatView(petsc_mat.mat(), 0);
-  LIBMESH_CHKERR(ierr);
+  LibmeshPetscCallA(mat.comm().get(), MatView(petsc_mat.mat(), 0));
 }

 namespace Moose
36 changes: 18 additions & 18 deletions framework/src/utils/SlepcEigenSolverConfiguration.C
@@ -28,41 +28,41 @@ SlepcEigenSolverConfiguration::SlepcEigenSolverConfiguration(
 void
 SlepcEigenSolverConfiguration::configure_solver()
 {
-  auto ierr = (PetscErrorCode)0;

   if (_eigen_problem.isNonlinearEigenvalueSolver())
   {
     // Set custom monitors for SNES and KSP
     _eigen_problem.initPetscOutputAndSomeSolverSettings();
     // Let us remove extra "eps_power" from SNES since users do not like it
-    ierr = Moose::SlepcSupport::mooseSlepcEPSSNESSetUpOptionPrefix(_slepc_solver.eps());
-    LIBMESH_CHKERR(ierr);
+    LibmeshPetscCallA(_eigen_problem.comm().get(),
+                      Moose::SlepcSupport::mooseSlepcEPSSNESSetUpOptionPrefix(_slepc_solver.eps()));
     // Let us hook up a customize PC if users ask. Users still can use PETSc options to override
     // this setting
     if (_eigen_problem.solverParams()._customized_pc_for_eigen)
-    {
-      ierr = Moose::SlepcSupport::mooseSlepcEPSSNESSetCustomizePC(_slepc_solver.eps());
-      LIBMESH_CHKERR(ierr);
-    }
+      LibmeshPetscCallA(_eigen_problem.comm().get(),
+                        Moose::SlepcSupport::mooseSlepcEPSSNESSetCustomizePC(_slepc_solver.eps()));

     // Let set a default PC side. I would like to have the setting be consistent with
     // what we do in regular nonlinear executioner. Petsc options are able to override
     // this setting
-    ierr = Moose::SlepcSupport::mooseSlepcEPSSNESKSPSetPCSide(_eigen_problem, _slepc_solver.eps());
-    LIBMESH_CHKERR(ierr);
+    LibmeshPetscCallA(
+        _eigen_problem.comm().get(),
+        Moose::SlepcSupport::mooseSlepcEPSSNESKSPSetPCSide(_eigen_problem, _slepc_solver.eps()));
     // A customized stopping test for nonlinear free power iterations.
     // Nonlinear power iterations need to be marked as converged in EPS to
     // retrieve solution from SLEPc EPS.
-    ierr = EPSSetStoppingTestFunction(
-        _slepc_solver.eps(), Moose::SlepcSupport::mooseSlepcStoppingTest, &_eigen_problem, NULL);
-    LIBMESH_CHKERR(ierr);
+    LibmeshPetscCallA(_eigen_problem.comm().get(),
+                      EPSSetStoppingTestFunction(_slepc_solver.eps(),
+                                                 Moose::SlepcSupport::mooseSlepcStoppingTest,
+                                                 &_eigen_problem,
+                                                 NULL));

     // Remove all SLEPc monitors.
-    ierr = EPSMonitorCancel(_slepc_solver.eps());
-    LIBMESH_CHKERR(ierr);
+    LibmeshPetscCallA(_eigen_problem.comm().get(), EPSMonitorCancel(_slepc_solver.eps()));
     // A customized EPS monitor in moose. We need to print only eigenvalue
-    ierr = EPSMonitorSet(
-        _slepc_solver.eps(), Moose::SlepcSupport::mooseSlepcEPSMonitor, &_eigen_problem, NULL);
-    LIBMESH_CHKERR(ierr);
+    LibmeshPetscCallA(
+        _eigen_problem.comm().get(),
+        EPSMonitorSet(
+            _slepc_solver.eps(), Moose::SlepcSupport::mooseSlepcEPSMonitor, &_eigen_problem, NULL));
   }
 }

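The monitor registered above, Moose::SlepcSupport::mooseSlepcEPSMonitor, is not part of this diff. As a rough, hypothetical sketch of the shape such a callback takes (this follows the standard SLEPc EPSMonitorSet signature, not MOOSE's actual implementation, and PETSC_SUCCESS assumes PETSc >= 3.19; use 0 on older versions):

    // Illustrative only: prints the leading eigenvalue approximation and its error
    // estimate at each outer iteration.
    PetscErrorCode exampleEPSMonitor(EPS /*eps*/, PetscInt its, PetscInt nconv,
                                     PetscScalar * eigr, PetscScalar * /*eigi*/,
                                     PetscReal * errest, PetscInt nest, void * /*mctx*/)
    {
      PetscFunctionBegin;
      if (nest > 0)
        PetscCall(PetscPrintf(PETSC_COMM_WORLD,
                              "%" PetscInt_FMT " EPS nconv %" PetscInt_FMT ", eigenvalue %g, error %g\n",
                              its, nconv, (double)PetscRealPart(eigr[0]), (double)errest[0]));
      PetscFunctionReturn(PETSC_SUCCESS);
    }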
78 changes: 40 additions & 38 deletions framework/src/utils/SlepcSupport.C
@@ -1000,51 +1000,54 @@ attachCallbacksToMat(EigenProblem & eigen_problem, Mat mat, bool eigen)
   // Attach the Jacobian computation function. If \p mat is the "eigen" matrix corresponding to B,
   // then attach our JacobianB computation routine, else the matrix corresponds to A, and we attach
   // the JacobianA computation routine
-  auto ierr = PetscObjectComposeFunction((PetscObject)mat,
-                                         "formJacobian",
-                                         eigen ? Moose::SlepcSupport::mooseSlepcEigenFormJacobianB
-                                               : Moose::SlepcSupport::mooseSlepcEigenFormJacobianA);
-  LIBMESH_CHKERR(ierr);
+  LibmeshPetscCallA(
+      eigen_problem.comm().get(),
+      PetscObjectComposeFunction((PetscObject)mat,
+                                 "formJacobian",
+                                 eigen ? Moose::SlepcSupport::mooseSlepcEigenFormJacobianB
+                                       : Moose::SlepcSupport::mooseSlepcEigenFormJacobianA));

   // Attach the residual computation function. If \p mat is the "eigen" matrix corresponding to B,
   // then attach our FunctionB computation routine, else the matrix corresponds to A, and we attach
   // the FunctionA computation routine
-  ierr = PetscObjectComposeFunction((PetscObject)mat,
-                                    "formFunction",
-                                    eigen ? Moose::SlepcSupport::mooseSlepcEigenFormFunctionB
-                                          : Moose::SlepcSupport::mooseSlepcEigenFormFunctionA);
-  LIBMESH_CHKERR(ierr);
+  LibmeshPetscCallA(
+      eigen_problem.comm().get(),
+      PetscObjectComposeFunction((PetscObject)mat,
+                                 "formFunction",
+                                 eigen ? Moose::SlepcSupport::mooseSlepcEigenFormFunctionB
+                                       : Moose::SlepcSupport::mooseSlepcEigenFormFunctionA));

   // It's also beneficial to be able to evaluate both A and B residuals at once
-  ierr = PetscObjectComposeFunction(
-      (PetscObject)mat, "formFunctionAB", Moose::SlepcSupport::mooseSlepcEigenFormFunctionAB);
-  LIBMESH_CHKERR(ierr);
+  LibmeshPetscCallA(eigen_problem.comm().get(),
+                    PetscObjectComposeFunction((PetscObject)mat,
+                                               "formFunctionAB",
+                                               Moose::SlepcSupport::mooseSlepcEigenFormFunctionAB));

   // Users may choose to provide a custom measure of the norm of B (Bx for a linear system)
   if (eigen_problem.bxNormProvided())
-  {
-    ierr = PetscObjectComposeFunction(
-        (PetscObject)mat, "formNorm", Moose::SlepcSupport::mooseSlepcEigenFormNorm);
-    LIBMESH_CHKERR(ierr);
-  }
+    LibmeshPetscCallA(eigen_problem.comm().get(),
+                      PetscObjectComposeFunction((PetscObject)mat,
+                                                 "formNorm",
+                                                 Moose::SlepcSupport::mooseSlepcEigenFormNorm));

   // Finally we need to attach the "context" object, which is our EigenProblem, to the matrices so
   // that eventually when we get callbacks from SLEPc we can call methods on the EigenProblem
   PetscContainer container;
-  ierr = PetscContainerCreate(eigen_problem.comm().get(), &container);
-  LIBMESH_CHKERR(ierr);
-  ierr = PetscContainerSetPointer(container, &eigen_problem);
-  LIBMESH_CHKERR(ierr);
-  ierr = PetscObjectCompose((PetscObject)mat, "formJacobianCtx", (PetscObject)container);
-  LIBMESH_CHKERR(ierr);
-  ierr = PetscObjectCompose((PetscObject)mat, "formFunctionCtx", (PetscObject)container);
+  LibmeshPetscCallA(eigen_problem.comm().get(),
+                    PetscContainerCreate(eigen_problem.comm().get(), &container));
+  LibmeshPetscCallA(eigen_problem.comm().get(),
+                    PetscContainerSetPointer(container, &eigen_problem));
+  LibmeshPetscCallA(
+      eigen_problem.comm().get(),
+      PetscObjectCompose((PetscObject)mat, "formJacobianCtx", (PetscObject)container));
+  LibmeshPetscCallA(
+      eigen_problem.comm().get(),
+      PetscObjectCompose((PetscObject)mat, "formFunctionCtx", (PetscObject)container));
   if (eigen_problem.bxNormProvided())
-  {
-    ierr = PetscObjectCompose((PetscObject)mat, "formNormCtx", (PetscObject)container);
-    LIBMESH_CHKERR(ierr);
-  }
-  ierr = PetscContainerDestroy(&container);
-  LIBMESH_CHKERR(ierr);
+    LibmeshPetscCallA(eigen_problem.comm().get(),
+                      PetscObjectCompose((PetscObject)mat, "formNormCtx", (PetscObject)container));
+
+  LibmeshPetscCallA(eigen_problem.comm().get(), PetscContainerDestroy(&container));
 }

 PetscErrorCode
@@ -1089,13 +1092,12 @@ mooseMatMult_NonEigen(Mat mat, Vec x, Vec r)
 void
 setOperationsForShellMat(EigenProblem & eigen_problem, Mat mat, bool eigen)
 {
-  auto ierr = MatShellSetContext(mat, &eigen_problem);
-  LIBMESH_CHKERR(ierr);
-  ierr = MatShellSetOperation(mat,
-                              MATOP_MULT,
-                              eigen ? (void (*)(void))mooseMatMult_Eigen
-                                    : (void (*)(void))mooseMatMult_NonEigen);
-  LIBMESH_CHKERR(ierr);
+  LibmeshPetscCallA(eigen_problem.comm().get(), MatShellSetContext(mat, &eigen_problem));
+  LibmeshPetscCallA(eigen_problem.comm().get(),
+                    MatShellSetOperation(mat,
+                                         MATOP_MULT,
+                                         eigen ? (void (*)(void))mooseMatMult_Eigen
+                                               : (void (*)(void))mooseMatMult_NonEigen));
 }

 PETSC_EXTERN PetscErrorCode
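For readers unfamiliar with the idiom in attachCallbacksToMat above: PetscObjectCompose and PetscContainer attach an arbitrary pointer (here the EigenProblem) to the Mat so SLEPc callbacks can recover it later. A hypothetical sketch of the retrieval side, which this commit does not touch (the variable names, and the assumption that a communicator comm and the Mat mat are in scope, are illustrative):

    // Look up the container stored under the same key used above, then unwrap the pointer.
    PetscContainer container = nullptr;
    void * ctx = nullptr;
    LibmeshPetscCallA(comm, PetscObjectQuery((PetscObject)mat, "formFunctionCtx", (PetscObject *)&container));
    if (container)
      LibmeshPetscCallA(comm, PetscContainerGetPointer(container, &ctx));
    auto * eigen_problem = static_cast<EigenProblem *>(ctx); // the pointer set via PetscContainerSetPointer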
89 changes: 41 additions & 48 deletions modules/external_petsc_solver/src/mesh/PETScDMDAMesh.C
@@ -91,38 +91,36 @@ add_element_Quad4(DM da,
   // xp: number of processors in x direction
   // yp: number of processors in y direction
   PetscInt Mx, My, xp, yp;
-  auto ierr = DMDAGetInfo(da,
-                          PETSC_IGNORE,
-                          &Mx,
-                          &My,
-                          PETSC_IGNORE,
-                          &xp,
-                          &yp,
-                          PETSC_IGNORE,
-                          PETSC_IGNORE,
-                          PETSC_IGNORE,
-                          PETSC_IGNORE,
-                          PETSC_IGNORE,
-                          PETSC_IGNORE,
-                          PETSC_IGNORE);
-  LIBMESH_CHKERR(ierr);
+  LibmeshPetscCallA(mesh.comm().get(),
+                    DMDAGetInfo(da,
+                                PETSC_IGNORE,
+                                &Mx,
+                                &My,
+                                PETSC_IGNORE,
+                                &xp,
+                                &yp,
+                                PETSC_IGNORE,
+                                PETSC_IGNORE,
+                                PETSC_IGNORE,
+                                PETSC_IGNORE,
+                                PETSC_IGNORE,
+                                PETSC_IGNORE,
+                                PETSC_IGNORE));

   const PetscInt *lx, *ly;
   PetscInt *lxo, *lyo;
   // PETSc-3.8.x or older use PetscDataType
 #if PETSC_VERSION_LESS_THAN(3, 9, 0)
-  ierr = DMGetWorkArray(da, xp + yp + 2, PETSC_INT, &lxo);
+  LibmeshPetscCallA(mesh.comm().get(), DMGetWorkArray(da, xp + yp + 2, PETSC_INT, &lxo));
 #else
   // PETSc-3.9.x or newer use MPI_DataType
-  ierr = DMGetWorkArray(da, xp + yp + 2, MPIU_INT, &lxo);
+  LibmeshPetscCallA(mesh.comm().get(), DMGetWorkArray(da, xp + yp + 2, MPIU_INT, &lxo));
 #endif
-  LIBMESH_CHKERR(ierr);

   // Gets the ranges of indices in the x, y and z direction that are owned by each process
   // Ranges here are different from what we have in Mat and Vec.
   // It means how many points each processor holds
-  ierr = DMDAGetOwnershipRanges(da, &lx, &ly, NULL);
-  LIBMESH_CHKERR(ierr);
+  LibmeshPetscCallA(mesh.comm().get(), DMDAGetOwnershipRanges(da, &lx, &ly, NULL));
   lxo[0] = 0;
   for (PetscInt i = 0; i < xp; i++)
     lxo[i + 1] = lxo[i] + lx[i];
@@ -137,31 +135,26 @@ add_element_Quad4(DM da,
       // Finds integer in a sorted array of integers
       // Loc: the location if found, otherwise -(slot+1)
       // where slot is the place the value would go
-      ierr = PetscFindInt(i, xp + 1, lxo, &xpid);
-      LIBMESH_CHKERR(ierr);
+      LibmeshPetscCallA(mesh.comm().get(), PetscFindInt(i, xp + 1, lxo, &xpid));

       xpid = xpid < 0 ? -xpid - 1 - 1 : xpid;

-      ierr = PetscFindInt(i + 1, xp + 1, lxo, &xpidplus);
-      LIBMESH_CHKERR(ierr);
+      LibmeshPetscCallA(mesh.comm().get(), PetscFindInt(i + 1, xp + 1, lxo, &xpidplus));

       xpidplus = xpidplus < 0 ? -xpidplus - 1 - 1 : xpidplus;

-      ierr = PetscFindInt(j, yp + 1, lyo, &ypid);
-      LIBMESH_CHKERR(ierr);
+      LibmeshPetscCallA(mesh.comm().get(), PetscFindInt(j, yp + 1, lyo, &ypid));

       ypid = ypid < 0 ? -ypid - 1 - 1 : ypid;

-      ierr = PetscFindInt(j + 1, yp + 1, lyo, &ypidplus);
-      LIBMESH_CHKERR(ierr);
+      LibmeshPetscCallA(mesh.comm().get(), PetscFindInt(j + 1, yp + 1, lyo, &ypidplus));

       ypidplus = ypidplus < 0 ? -ypidplus - 1 - 1 : ypidplus;
 #if PETSC_VERSION_LESS_THAN(3, 9, 0)
-      ierr = DMRestoreWorkArray(da, xp + yp + 2, PETSC_INT, &lxo);
+      LibmeshPetscCallA(mesh.comm().get(), DMRestoreWorkArray(da, xp + yp + 2, PETSC_INT, &lxo));
 #else
-      ierr = DMRestoreWorkArray(da, xp + yp + 2, MPIU_INT, &lxo);
+      LibmeshPetscCallA(mesh.comm().get(), DMRestoreWorkArray(da, xp + yp + 2, MPIU_INT, &lxo));
 #endif
-      LIBMESH_CHKERR(ierr);

       // Bottom Left
       auto node0_ptr = mesh.add_point(Point(static_cast<Real>(i) / nx, static_cast<Real>(j) / ny, 0),
@@ -347,23 +340,23 @@ build_cube_Quad4(UnstructuredMesh & mesh, DM da)
   PetscInt xs, ys, xm, ym, Mx, My, xp, yp;

   /* Get local grid boundaries */
-  auto ierr = DMDAGetCorners(da, &xs, &ys, PETSC_IGNORE, &xm, &ym, PETSC_IGNORE);
-  LIBMESH_CHKERR(ierr);
-  ierr = DMDAGetInfo(da,
-                     PETSC_IGNORE,
-                     &Mx,
-                     &My,
-                     PETSC_IGNORE,
-                     &xp,
-                     &yp,
-                     PETSC_IGNORE,
-                     PETSC_IGNORE,
-                     PETSC_IGNORE,
-                     PETSC_IGNORE,
-                     PETSC_IGNORE,
-                     PETSC_IGNORE,
-                     PETSC_IGNORE);
-  LIBMESH_CHKERR(ierr);
+  LibmeshPetscCallA(mesh.comm().get(),
+                    DMDAGetCorners(da, &xs, &ys, PETSC_IGNORE, &xm, &ym, PETSC_IGNORE));
+  LibmeshPetscCallA(mesh.comm().get(),
+                    DMDAGetInfo(da,
+                                PETSC_IGNORE,
+                                &Mx,
+                                &My,
+                                PETSC_IGNORE,
+                                &xp,
+                                &yp,
+                                PETSC_IGNORE,
+                                PETSC_IGNORE,
+                                PETSC_IGNORE,
+                                PETSC_IGNORE,
+                                PETSC_IGNORE,
+                                PETSC_IGNORE,
+                                PETSC_IGNORE));

   for (PetscInt j = ys; j < ys + ym; j++)
     for (PetscInt i = xs; i < xs + xm; i++)
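A note on the PetscFindInt idiom above: PetscFindInt reports a missing key as -(slot+1), where slot is the insertion point, and the `-xpid - 1 - 1` conversion turns that back into the index of the owning rank in the offsets array. A small worked example with made-up ownership offsets (lxo and comm here are illustrative values, not taken from the code):

    // Three ranks in x: rank 0 owns global indices [0,4), rank 1 owns [4,8), rank 2 owns [8,12).
    const PetscInt lxo[4] = {0, 4, 8, 12};
    PetscInt xpid;
    LibmeshPetscCallA(comm, PetscFindInt(5, 4, lxo, &xpid));
    // 5 is not in lxo; it would be inserted at slot 2, so PetscFindInt returns -(2 + 1) = -3.
    xpid = xpid < 0 ? -xpid - 1 - 1 : xpid; // -(-3) - 1 - 1 = 1, i.e. index 5 belongs to rank 1.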