Merge pull request #65 from gridap/adding_mumps_petsc_functions
Adding mumps petsc functions
fverdugo authored Jan 24, 2022
2 parents 309e139 + ef06f6a commit 7b62503
Showing 9 changed files with 186 additions and 14 deletions.
64 changes: 64 additions & 0 deletions .github/workflows/ci_extra.yml
@@ -0,0 +1,64 @@
name: CI_EXTRA
on: [push, pull_request]
jobs:
  test:
    name: Tests ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }} - ${{ github.event_name }}
    runs-on: ${{ matrix.os }}
    env:
      JULIA_MPI_BINARY: "system"
      JULIA_PETSC_LIBRARY: "/opt/petsc/3.15.4/lib/libpetsc"
    strategy:
      fail-fast: false
      matrix:
        version:
          - '1.6'
        os:
          - ubuntu-latest
        arch:
          - x64
    steps:
      - uses: actions/checkout@v2
      - name: Cache petsc
        id: cache-petsc
        uses: actions/cache@v2
        with:
          path: ${{env.JULIA_PETSC_LIBRARY}}
          key: ${{ runner.os }}-build-${{ env.JULIA_PETSC_LIBRARY }}-
          restore-keys: |
            ${{ runner.os }}-build-${{ env.JULIA_PETSC_LIBRARY }}-
            ${{ runner.os }}-build-
            ${{ runner.os }}-
      - uses: julia-actions/setup-julia@v1
        with:
          version: ${{ matrix.version }}
          arch: ${{ matrix.arch }}
      - name: Install petsc dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y wget gfortran g++ openmpi-bin libopenmpi-dev
      - name: Install petsc
        ##if: steps.cache-petsc.outputs.cache-hit != 'true'
        run: |
          CURR_DIR=$(pwd)
          PACKAGE=petsc
          VERSION=3.15.4
          INSTALL_ROOT=/opt
          PETSC_INSTALL=$INSTALL_ROOT/$PACKAGE/$VERSION
          TAR_FILE=$PACKAGE-$VERSION.tar.gz
          URL="https://ftp.mcs.anl.gov/pub/petsc/release-snapshots/"
          ROOT_DIR=/tmp
          SOURCES_DIR=$ROOT_DIR/$PACKAGE-$VERSION
          BUILD_DIR=$SOURCES_DIR/build
          wget -q $URL/$TAR_FILE -O $ROOT_DIR/$TAR_FILE
          mkdir -p $SOURCES_DIR
          tar xzf $ROOT_DIR/$TAR_FILE -C $SOURCES_DIR --strip-components=1
          cd $SOURCES_DIR
          ./configure --prefix=$PETSC_INSTALL --with-cc=mpicc --with-cxx=mpicxx --with-fc=mpif90 \
            --download-mumps --download-scalapack --download-parmetis --download-metis \
            --download-ptscotch --with-debugging --with-x=0 --with-shared=1 \
            --with-mpi=1 --with-64-bit-indices
          make
          make install
      - run: julia --project=. -e 'using Pkg; Pkg.instantiate(); Pkg.build(); Pkg.precompile()'
      - run: julia --project=. --color=yes --check-bounds=yes test/sequential/runtests.jl
      - run: julia --project=. --color=yes --check-bounds=yes test/mpi/runtests.jl
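This workflow builds PETSc 3.15.4 with MUMPS, ScaLAPACK, (Par)METIS and PT-Scotch, and points the Julia packages at the system libraries through the JULIA_MPI_BINARY and JULIA_PETSC_LIBRARY environment variables. A minimal sketch of the same wiring on a local machine, assuming the same install prefix as the workflow above:

  # Point MPI.jl at the system MPI and GridapPETSc at the custom PETSc build.
  ENV["JULIA_MPI_BINARY"] = "system"
  ENV["JULIA_PETSC_LIBRARY"] = "/opt/petsc/3.15.4/lib/libpetsc"

  using Pkg
  Pkg.build("MPI")          # re-link MPI.jl against the system OpenMPI
  Pkg.build("GridapPETSc")  # pick up the MUMPS-enabled libpetsc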
5 changes: 4 additions & 1 deletion src/GridapPETSc.jl
@@ -59,7 +59,10 @@ function __init__()
libpetsc_handle[] = PETSc_jll.libpetsc_handle
end
for (handle,sym) in _PRELOADS
handle[] = Libdl.dlsym(libpetsc_handle[],sym)
_handle = Libdl.dlsym(libpetsc_handle[],sym;throw_error=false)
if _handle !== nothing
handle[] = _handle
end
end
end

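With this change, __init__ no longer fails when the loaded libpetsc lacks one of the preloaded symbols (for instance the MUMPS entry points when PETSc was built without MUMPS): the corresponding handle is simply left as C_NULL, and the @check added to the @wrapper macro below reports the missing symbol only if that wrapper is actually called. This enables runtime feature probing, as in this sketch (the handle name follows the convention used by the tests below):

  using GridapPETSc
  using GridapPETSc: PETSC

  # MatMumpsSetIcntl_handle stays C_NULL when the linked PETSc has no MUMPS support.
  if PETSC.MatMumpsSetIcntl_handle[] != C_NULL
    @info "PETSc provides MUMPS; the direct-solver tests can run"
  else
    @info "MUMPS not available in this PETSc build; skipping those tests"
  end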
28 changes: 28 additions & 0 deletions src/PETSC.jl
@@ -18,6 +18,7 @@ module PETSC
using Libdl
using GridapPETSc: libpetsc_handle
using GridapPETSc: _PRELOADS
using Gridap.Helpers: @check
using MPI

include("Config.jl")
@@ -59,6 +60,7 @@ macro wrapper(fn,rt,argts,args,url)
push!(_PRELOADS,($hn,$fn))
@doc $str
@inline function $(fn.value)($(args.args...))
@check $(hn)[] != C_NULL "Missing symbol. Re-configure and compile PETSc."
ccall($(hn)[],$rt,$argts,$(args.args...))
end
end
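With the added @check, calling a wrapper whose symbol was not found fails with a readable error at call time rather than crashing on a NULL function pointer. For one of the new MUMPS wrappers the macro generates roughly the following (a simplified sketch inferred from the macro body above, not the literal expansion):

  const MatMumpsSetIcntl_handle = Ref(C_NULL)

  @inline function MatMumpsSetIcntl(mat, icntl, val)
    @check MatMumpsSetIcntl_handle[] != C_NULL "Missing symbol. Re-configure and compile PETSc."
    ccall(MatMumpsSetIcntl_handle[], PetscErrorCode, (Mat, PetscInt, PetscInt), mat, icntl, val)
  end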
@@ -417,6 +419,24 @@ const MATLMVMDIAGBROYDEN = "lmvmdiagbroyden"
const MATCONSTANTDIAGONAL = "constantdiagonal"
const MATHARA = "hara"

const MATSOLVERSUPERLU = "superlu"
const MATSOLVERSUPERLU_DIST = "superlu_dist"
const MATSOLVERSTRUMPACK = "strumpack"
const MATSOLVERUMFPACK = "umfpack"
const MATSOLVERCHOLMOD = "cholmod"
const MATSOLVERKLU = "klu"
const MATSOLVERSPARSEELEMENTAL = "sparseelemental"
const MATSOLVERELEMENTAL = "elemental"
const MATSOLVERESSL = "essl"
const MATSOLVERLUSOL = "lusol"
const MATSOLVERMUMPS = "mumps"
const MATSOLVERMKL_PARDISO = "mkl_pardiso"
const MATSOLVERMKL_CPARDISO = "mkl_cpardiso"
const MATSOLVERPASTIX = "pastix"
const MATSOLVERMATLAB = "matlab"
const MATSOLVERPETSC = "petsc"
const MATSOLVERCUSPARSE = "cusparse"

"""
Julia alias for the `Mat` C type.
@@ -448,6 +468,8 @@ Base.convert(::Type{Mat},p::Ptr{Cvoid}) = Mat(p)
@wrapper(:MatZeroEntries,PetscErrorCode,(Mat,),(mat,),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Mat/MatZeroEntries.html")
@wrapper(:MatCopy,PetscErrorCode,(Mat,Mat,MatStructure),(A,B,str),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Mat/MatCopy.html")
@wrapper(:MatSetBlockSize,PetscErrorCode,(Mat,PetscInt),(mat,bs),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Mat/MatSetBlockSize.html")
@wrapper(:MatMumpsSetIcntl,PetscErrorCode,(Mat,PetscInt,PetscInt),(mat,icntl,val),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Mat/MatMumpsSetIcntl.html")
@wrapper(:MatMumpsSetCntl,PetscErrorCode,(Mat,PetscInt,PetscReal),(mat,icntl,val),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Mat/MatMumpsSetCntl.html")

# Null space related

@@ -615,6 +637,11 @@ Base.convert(::Type{PC},p::Ptr{Cvoid}) = PC(p)
@wrapper(:KSPSetType,PetscErrorCode,(KSP,KSPType),(ksp,typ),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/KSP/KSPSetType.html")
@wrapper(:KSPGetPC,PetscErrorCode,(KSP,Ptr{PC}),(ksp,pc),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/KSP/KSPGetPC.html")
@wrapper(:PCSetType,PetscErrorCode,(PC,PCType),(pc,typ),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/PC/PCSetType.html")
@wrapper(:PCView,PetscErrorCode,(PC,PetscViewer),(pc,viewer),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/PC/PCView.html")
@wrapper(:PCFactorSetMatSolverType,PetscErrorCode,(PC,PCType),(pc,typ),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/PC/PCFactorSetMatSolverType.html")
@wrapper(:PCFactorSetUpMatSolverType,PetscErrorCode,(PC,),(pc,),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/PC/PCFactorSetUpMatSolverType.html")
@wrapper(:PCFactorGetMatrix,PetscErrorCode,(PC,Ptr{Mat}),(ksp,mat),"https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/PC/PCFactorGetMatrix.html")


"""
Julia alias for the `SNES` C type.
@@ -658,6 +685,7 @@ const SNESPATCH = "patch"
@wrapper(:SNESSetFromOptions,PetscErrorCode,(SNES,),(snes,),"https://petsc.org/release/docs/manualpages/SNES/SNESSetFromOptions.html")
@wrapper(:SNESView,PetscErrorCode,(SNES,PetscViewer),(snes,viewer),"https://petsc.org/release/docs/manualpages/SNES/SNESView.html")
@wrapper(:SNESSetType,PetscErrorCode,(SNES,SNESType),(snes,type),"https://petsc.org/release/docs/manualpages/SNES/SNESSetType.html")
@wrapper(:SNESGetKSP,PetscErrorCode,(SNES,Ptr{KSP}),(snes,ksp),"https://petsc.org/release/docs/manualpages/SNES/SNESGetKSP.html")
# Garbage collection of PETSc objects
@wrapper(:PetscObjectRegisterDestroy,PetscErrorCode,(Ptr{Cvoid},),(obj,),"https://petsc.org/release/docs/manualpages/Sys/PetscObjectRegisterDestroy.html")
@wrapper(:PetscObjectRegisterDestroyAll,PetscErrorCode,(),(),"https://petsc.org/release/docs/manualpages/Sys/PetscObjectRegisterDestroyAll.html")
2 changes: 1 addition & 1 deletion src/PETScLinearSolvers.jl
@@ -53,8 +53,8 @@ function Algebra.numerical_setup(ss::PETScLinearSolverSS,A::AbstractMatrix)
B = convert(PETScMatrix,A)
ns = PETScLinearSolverNS(A,B)
@check_error_code PETSC.KSPCreate(B.comm,ns.ksp)
ss.solver.setup(ns.ksp)
@check_error_code PETSC.KSPSetOperators(ns.ksp[],ns.B.mat[],ns.B.mat[])
ss.solver.setup(ns.ksp)
@check_error_code PETSC.KSPSetUp(ns.ksp[])
Init(ns)
end
3 changes: 2 additions & 1 deletion src/PETScNonlinearSolvers.jl
@@ -146,7 +146,6 @@ function _setup_cache(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearO

snes_ref=Ref{SNES}()
@check_error_code PETSC.SNESCreate(jac_petsc_mat_A.comm,snes_ref)
nls.setup(snes_ref)

PETScNonlinearSolverCache(jac_petsc_mat_A.comm, snes_ref, op, x, x_sys_layout, res_sys_layout,
jac_mat_A, jac_mat_A,
@@ -181,6 +180,8 @@ function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::Nonlinea
fptr = @cfunction(snes_jacobian, PetscInt, (Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid},Ptr{Cvoid}))
PETSC.SNESSetJacobian(cache.snes[],cache.jac_petsc_mat_A.mat[],cache.jac_petsc_mat_A.mat[],fptr,ctx)

nls.setup(cache.snes)

@check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc.vec[])
copy!(x,cache.x_petsc)
cache
52 changes: 45 additions & 7 deletions test/PLaplacianTests.jl
@@ -3,18 +3,52 @@ using Gridap.Algebra
using GridapDistributed
using PartitionedArrays
using GridapPETSc
using GridapPETSc: PETSC
using Test


function mysnessetup(snes)
ksp = Ref{GridapPETSc.PETSC.KSP}()
pc = Ref{GridapPETSc.PETSC.PC}()
mumpsmat = Ref{GridapPETSc.PETSC.Mat}()
@check_error_code GridapPETSc.PETSC.SNESSetFromOptions(snes[])
@check_error_code GridapPETSc.PETSC.SNESGetKSP(snes[],ksp)
@check_error_code GridapPETSc.PETSC.KSPView(ksp[],C_NULL)
@check_error_code GridapPETSc.PETSC.KSPSetType(ksp[],GridapPETSc.PETSC.KSPPREONLY)
@check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[],pc)
@check_error_code GridapPETSc.PETSC.PCView(pc[],C_NULL)
@check_error_code GridapPETSc.PETSC.PCSetType(pc[],GridapPETSc.PETSC.PCLU)
@check_error_code GridapPETSc.PETSC.PCFactorSetMatSolverType(pc[],GridapPETSc.PETSC.MATSOLVERMUMPS)
@check_error_code GridapPETSc.PETSC.PCFactorSetUpMatSolverType(pc[])
@check_error_code GridapPETSc.PETSC.PCFactorGetMatrix(pc[],mumpsmat)
@check_error_code GridapPETSc.PETSC.MatMumpsSetIcntl(mumpsmat[], 4, 2)
@check_error_code GridapPETSc.PETSC.MatMumpsSetIcntl(mumpsmat[], 28, 2)
@check_error_code GridapPETSc.PETSC.MatMumpsSetIcntl(mumpsmat[], 29, 2)
@check_error_code GridapPETSc.PETSC.MatMumpsSetCntl(mumpsmat[], 3, 1.0e-6)
end
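mysnessetup selects MUMPS through the new low-level wrappers: a preonly KSP with an LU preconditioner, MUMPS as the factorization package, and ICNTL/CNTL tuning via MatMumpsSetIcntl/MatMumpsSetCntl. Roughly the same configuration can be expressed through the PETSc options database, as in this sketch (option names follow PETSc's MUMPS interface; the low-level route above is what this test actually exercises):

  options = "-snes_type newtonls -snes_monitor " *
            "-ksp_type preonly -pc_type lu -pc_factor_mat_solver_type mumps " *
            "-mat_mumps_icntl_4 2 -mat_mumps_icntl_28 2 -mat_mumps_icntl_29 2 " *
            "-mat_mumps_cntl_3 1.0e-6"
  GridapPETSc.with(args=split(options)) do
    # assemble and solve the p-Laplacian problem as in main(parts,solver,strategy) below
  end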

function main(parts)
options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-14 -snes_atol 0.0 -snes_monitor -pc_type jacobi -ksp_type gmres -ksp_monitor -snes_converged_reason"
main(parts,:gmres)
if PETSC.MatMumpsSetIcntl_handle[] != C_NULL
main(parts,:mumps)
end
end

function main(parts,solver)
if solver == :mumps
options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-14 -snes_atol 0.0 -snes_monitor -snes_converged_reason"
elseif solver == :gmres
options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-14 -snes_atol 0.0 -snes_monitor -pc_type jacobi -ksp_type gmres -ksp_monitor -snes_converged_reason"
else
error()
end
GridapPETSc.with(args=split(options)) do
main(parts,FullyAssembledRows())
main(parts,SubAssembledRows())
main(parts,solver,FullyAssembledRows())
main(parts,solver,SubAssembledRows())
end
end

function main(parts,strategy)
function main(parts,solver,strategy)

domain = (0,4,0,4)
cells = (100,100)
@@ -47,9 +81,13 @@ function main(parts,strategy)
# fill!(x,1)
# @test (norm(A*x-_A*x)+1) ≈ 1

nls = PETScNonlinearSolver()
solver = FESolver(nls)
uh = solve(solver,op)
if solver == :mumps
nls = PETScNonlinearSolver(mysnessetup)
else
nls = PETScNonlinearSolver()
end
fesolver = FESolver(nls)
uh = solve(fesolver,op)

Ωo = Triangulation(model)
dΩo = Measure(Ωo,2*k)
39 changes: 37 additions & 2 deletions test/PoissonTests.jl
@@ -4,11 +4,42 @@ using Gridap.Algebra
using Gridap.FESpaces
using GridapDistributed
using GridapPETSc
using GridapPETSc: PETSC
using PartitionedArrays
using Test

# Setup solver via low level PETSC API calls
function mykspsetup(ksp)
pc = Ref{GridapPETSc.PETSC.PC}()
mumpsmat = Ref{GridapPETSc.PETSC.Mat}()
@check_error_code GridapPETSc.PETSC.KSPSetType(ksp[],GridapPETSc.PETSC.KSPPREONLY)
@check_error_code GridapPETSc.PETSC.KSPGetPC(ksp[],pc)
@check_error_code GridapPETSc.PETSC.PCSetType(pc[],GridapPETSc.PETSC.PCLU)
@check_error_code GridapPETSc.PETSC.PCFactorSetMatSolverType(pc[],GridapPETSc.PETSC.MATSOLVERMUMPS)
@check_error_code GridapPETSc.PETSC.PCFactorSetUpMatSolverType(pc[])
@check_error_code GridapPETSc.PETSC.PCFactorGetMatrix(pc[],mumpsmat)
@check_error_code GridapPETSc.PETSC.MatMumpsSetIcntl(mumpsmat[], 4, 2)
@check_error_code GridapPETSc.PETSC.MatMumpsSetIcntl(mumpsmat[], 28, 2)
@check_error_code GridapPETSc.PETSC.MatMumpsSetIcntl(mumpsmat[], 29, 2)
@check_error_code GridapPETSc.PETSC.MatMumpsSetCntl(mumpsmat[], 3, 1.0e-6)
@check_error_code GridapPETSc.PETSC.KSPSetFromOptions(ksp[])
end

function main(parts)
options = "-info -pc_type jacobi -ksp_type cg -ksp_monitor -ksp_rtol 1.0e-12"
main(parts,:cg)
if PETSC.MatMumpsSetIcntl_handle[] != C_NULL
main(parts,:mumps)
end
end

function main(parts,solver)
if solver == :mumps
options = "-info -ksp_error_if_not_converged true"
elseif solver == :cg
options = "-info -pc_type jacobi -ksp_type cg -ksp_monitor -ksp_rtol 1.0e-12"
else
error()
end
GridapPETSc.with(args=split(options)) do
domain = (0,4,0,4)
cells = (4,4)
@@ -39,7 +70,11 @@ function main(parts)
assem=SparseMatrixAssembler(SparseMatrixCSR{0,PetscScalar,PetscInt},Vector{Float64},U,V)
op = AffineFEOperator(a,l,U,V,assem)

ls = PETScLinearSolver()
if solver == :mumps
ls = PETScLinearSolver(mykspsetup)
else
ls = PETScLinearSolver()
end
fels = LinearFESolver(ls)
uh = solve(fels,op)
eh = u - uh
1 change: 0 additions & 1 deletion test/mpi/PoissonTests.jl
@@ -1,4 +1,3 @@
include("../PoissonTests.jl")
nparts = (2,2)
prun(main,mpi,nparts)
prun(main,mpi,nparts)
6 changes: 5 additions & 1 deletion test/mpi/mpiexec.jl
@@ -5,7 +5,11 @@ function run_mpi_driver(;procs,file)
testdir = joinpath(mpidir,"..")
repodir = joinpath(testdir,"..")
mpiexec() do cmd
run(`$cmd -n $procs $(Base.julia_cmd()) --project=$repodir $(joinpath(mpidir,file))`)
if MPI.MPI_LIBRARY == MPI.OpenMPI
run(`$cmd -n $procs --allow-run-as-root --oversubscribe $(Base.julia_cmd()) --project=$repodir $(joinpath(mpidir,file))`)
else
run(`$cmd -n $procs $(Base.julia_cmd()) --project=$repodir $(joinpath(mpidir,file))`)
end
@test true
end
end
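The OpenMPI branch adds --allow-run-as-root and --oversubscribe so the MPI tests can run inside CI containers (root user, fewer cores than requested ranks). A hypothetical invocation of the driver, with file names taken from this test tree:

  # Run the distributed Poisson test on 4 ranks (test/mpi/PoissonTests.jl uses nparts = (2,2)).
  include("mpiexec.jl")
  run_mpi_driver(procs=4, file="PoissonTests.jl")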
