diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index c9524b8..f2ad258 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,74 +1,40 @@
 name: CI
 on: [push, pull_request]
 jobs:
-  Tests:
+  test:
     name: Tests ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }} - ${{ github.event_name }}
     runs-on: ${{ matrix.os }}
-    env:
-      JULIA_MPI_BINARY: "system"
-      JULIA_PETSC_LIBRARY: "/opt/petsc/3.15.4/lib/libpetsc"
     strategy:
       fail-fast: false
       matrix:
         version:
           - '1.6'
         os:
-          - ubuntu-latest
+          - ubuntu-18.04
         arch:
           - x64
     steps:
       - uses: actions/checkout@v2
-      - name: Cache petsc
-        id: cache-petsc
-        uses: actions/cache@v2
-        with:
-          path: ${{env.JULIA_PETSC_LIBRARY}}
-          key: ${{ runner.os }}-build-${{ env.JULIA_PETSC_LIBRARY }}-
-          restore-keys: |
-            ${{ runner.os }}-build-${{ env.JULIA_PETSC_LIBRARY }}-
-            ${{ runner.os }}-build-
-            ${{ runner.os }}-
       - uses: julia-actions/setup-julia@v1
         with:
           version: ${{ matrix.version }}
           arch: ${{ matrix.arch }}
-      - name: Install petsc dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y wget gfortran g++ openmpi-bin libopenmpi-dev
-      - name: Install petsc
-        if: steps.cache-petsc.outputs.cache-hit != 'true'
-        run: |
-          # Install p4est 2.2 from sources
-          CURR_DIR=$(pwd)
-          PACKAGE=petsc
-          VERSION=3.15.4
-          INSTALL_ROOT=/opt
-          PETSC_INSTALL=$INSTALL_ROOT/$PACKAGE/$VERSION
-          TAR_FILE=$PACKAGE-$VERSION.tar.gz
-          URL="https://ftp.mcs.anl.gov/pub/petsc/release-snapshots/"
-          ROOT_DIR=/tmp
-          SOURCES_DIR=$ROOT_DIR/$PACKAGE-$VERSION
-          BUILD_DIR=$SOURCES_DIR/build
-          wget -q $URL/$TAR_FILE -O $ROOT_DIR/$TAR_FILE
-          mkdir -p $SOURCES_DIR
-          tar xzf $ROOT_DIR/$TAR_FILE -C $SOURCES_DIR --strip-components=1
-          cd $SOURCES_DIR
-          ./configure --prefix=$PETSC_INSTALL --with-cc=mpicc --with-cxx=mpicxx --with-fc=mpif90 \
-            --download-mumps --download-scalapack --download-parmetis --download-metis \
-            --download-ptscotch --with-debugging --with-x=0 --with-shared=1 \
-            --with-mpi=1 --with-64-bit-indices
-          make
-          make install
-      - run: julia --project=. -e 'using Pkg; Pkg.instantiate(); Pkg.build(); Pkg.precompile()'
-      - run: julia --project=. --color=yes --check-bounds=yes test/sequential/runtests.jl
-      - run: julia --project=. --color=yes --check-bounds=yes test/mpi/runtests.jl
+      - uses: actions/cache@v1
+        env:
+          cache-name: cache-artifacts
+        with:
+          path: ~/.julia/artifacts
+          key: ${{ runner.os }}-test-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }}
+          restore-keys: |
+            ${{ runner.os }}-test-${{ env.cache-name }}-
+            ${{ runner.os }}-test-
+            ${{ runner.os }}-
+      - uses: julia-actions/julia-buildpkg@v1
+      - uses: julia-actions/julia-runtest@v1
       - uses: julia-actions/julia-processcoverage@v1
       - uses: codecov/codecov-action@v1
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
-          file: lcov.info
+          file: lcov.info
   docs:
     name: Documentation
     runs-on: ubuntu-latest
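Note on the replacement steps: building and testing now go through the stock Julia actions instead of hand-rolled `Pkg` invocations. Locally this corresponds roughly to the sketch below (the coverage flag is an assumption about what `julia-runtest` passes; consult the action docs for the exact behavior):

```julia
using Pkg
Pkg.activate(".")        # the repository checkout
Pkg.instantiate()
Pkg.build()              # roughly what julia-actions/julia-buildpkg@v1 runs
Pkg.test(coverage=true)  # roughly what julia-actions/julia-runtest@v1 runs
```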
diff --git a/.github/workflows/ci_extra.yml b/.github/workflows/ci_extra.yml
new file mode 100644
index 0000000..1f814f5
--- /dev/null
+++ b/.github/workflows/ci_extra.yml
@@ -0,0 +1,64 @@
+name: CI_EXTRA
+on: [push, pull_request]
+jobs:
+  test:
+    name: Tests ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }} - ${{ github.event_name }}
+    runs-on: ${{ matrix.os }}
+    env:
+      JULIA_MPI_BINARY: "system"
+      JULIA_PETSC_LIBRARY: "/opt/petsc/3.15.4/lib/libpetsc"
+    strategy:
+      fail-fast: false
+      matrix:
+        version:
+          - '1.6'
+        os:
+          - ubuntu-latest
+        arch:
+          - x64
+    steps:
+      - uses: actions/checkout@v2
+      - name: Cache petsc
+        id: cache-petsc
+        uses: actions/cache@v2
+        with:
+          path: ${{env.JULIA_PETSC_LIBRARY}}
+          key: ${{ runner.os }}-build-${{ env.JULIA_PETSC_LIBRARY }}-
+          restore-keys: |
+            ${{ runner.os }}-build-${{ env.JULIA_PETSC_LIBRARY }}-
+            ${{ runner.os }}-build-
+            ${{ runner.os }}-
+      - uses: julia-actions/setup-julia@v1
+        with:
+          version: ${{ matrix.version }}
+          arch: ${{ matrix.arch }}
+      - name: Install petsc dependencies
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y wget gfortran g++ openmpi-bin libopenmpi-dev
+      - name: Install petsc
+        ##if: steps.cache-petsc.outputs.cache-hit != 'true'
+        run: |
+          CURR_DIR=$(pwd)
+          PACKAGE=petsc
+          VERSION=3.15.4
+          INSTALL_ROOT=/opt
+          PETSC_INSTALL=$INSTALL_ROOT/$PACKAGE/$VERSION
+          TAR_FILE=$PACKAGE-$VERSION.tar.gz
+          URL="https://ftp.mcs.anl.gov/pub/petsc/release-snapshots/"
+          ROOT_DIR=/tmp
+          SOURCES_DIR=$ROOT_DIR/$PACKAGE-$VERSION
+          BUILD_DIR=$SOURCES_DIR/build
+          wget -q $URL/$TAR_FILE -O $ROOT_DIR/$TAR_FILE
+          mkdir -p $SOURCES_DIR
+          tar xzf $ROOT_DIR/$TAR_FILE -C $SOURCES_DIR --strip-components=1
+          cd $SOURCES_DIR
+          ./configure --prefix=$PETSC_INSTALL --with-cc=mpicc --with-cxx=mpicxx --with-fc=mpif90 \
+            --download-mumps --download-scalapack --download-parmetis --download-metis \
+            --download-ptscotch --with-debugging --with-x=0 --with-shared=1 \
+            --with-mpi=1 --with-64-bit-indices
+          make
+          make install
+      - run: julia --project=. -e 'using Pkg; Pkg.instantiate(); Pkg.build(); Pkg.precompile()'
+      - run: julia --project=. --color=yes --check-bounds=yes test/sequential/runtests.jl
+      - run: julia --project=. --color=yes --check-bounds=yes test/mpi/runtests.jl
diff --git a/Project.toml b/Project.toml
index 836d6e0..c8d0d9b 100644
--- a/Project.toml
+++ b/Project.toml
@@ -21,7 +21,7 @@ GridapDistributed = "0.2"
 MPI = "0.14, 0.15, 0.16, 0.17, 0.18, 0.19"
 PETSc_jll = "3.13"
 PartitionedArrays = "0.2.4"
-SparseMatricesCSR = "0.6.1"
+SparseMatricesCSR = "0.6.6"
 julia = "1.3"
 
 [extras]
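The `SparseMatricesCSR` compat bump supports the new sparse fast path added to `src/PETScArrays.jl` further below: converting to the 0-based CSR type yields row pointers and column indices in exactly the layout PETSc consumes. A small self-contained sketch of that index convention, with plain `Float64`/`Int64` standing in for `PetscScalar`/`PetscInt`:

```julia
using SparseArrays
using SparseMatricesCSR

A = sparse([1.0 0.0 2.0; 0.0 3.0 0.0; 4.0 0.0 5.0])
csr = convert(SparseMatrixCSR{0,Float64,Int64}, A)  # Bi = 0: zero-based indexing

# Row i (1-based) owns the entries at 1-based positions ia[i]+1 : ia[i+1],
# which is the index arithmetic the new _copy! method relies on.
ia, ja = csr.rowptr, csr.colval
@assert ia[1] == 0                    # row pointers start at 0
@assert ja[ia[1]+1:ia[2]] == [0, 2]   # row 1 holds columns 1 and 3, zero-based
```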
diff --git a/src/Environment.jl b/src/Environment.jl
index 474d2aa..f642c75 100644
--- a/src/Environment.jl
+++ b/src/Environment.jl
@@ -35,8 +35,9 @@ end
 
 function with(f;kwargs...)
   Init(;kwargs...)
-  f()
+  out = f()
   Finalize()
+  out
 end
 
 # In an MPI environment context,
diff --git a/src/GridapPETSc.jl b/src/GridapPETSc.jl
index dbdd563..defef13 100644
--- a/src/GridapPETSc.jl
+++ b/src/GridapPETSc.jl
@@ -59,7 +59,10 @@ function __init__()
     libpetsc_handle[] = PETSc_jll.libpetsc_handle
   end
   for (handle,sym) in _PRELOADS
-    handle[] = Libdl.dlsym(libpetsc_handle[],sym)
+    _handle = Libdl.dlsym(libpetsc_handle[],sym;throw_error=false)
+    if _handle !== nothing
+      handle[] = _handle
+    end
   end
 end
 
diff --git a/src/PETSC.jl b/src/PETSC.jl
index 22cf185..bafe44c 100644
--- a/src/PETSC.jl
+++ b/src/PETSC.jl
@@ -18,6 +18,7 @@ module PETSC
 using Libdl
 using GridapPETSc: libpetsc_handle
 using GridapPETSc: _PRELOADS
+using Gridap.Helpers: @check
 using MPI
 
 include("Config.jl")
@@ -59,6 +60,7 @@ macro wrapper(fn,rt,argts,args,url)
     push!(_PRELOADS,($hn,$fn))
     @doc $str
     @inline function $(fn.value)($(args.args...))
+      @check $(hn)[] != C_NULL "Missing symbol. Re-configure and compile PETSc."
       ccall($(hn)[],$rt,$argts,$(args.args...))
     end
   end
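The two source changes above work together: `__init__` now resolves each preloaded PETSc symbol with `throw_error=false`, leaving its handle at `C_NULL` when the symbol is absent, and every `@wrapper`-generated function checks its handle before the `ccall`, so calling a routine the PETSc build lacks fails with a readable message rather than a crash. A sketch of the lookup, reusing the package's own `libpetsc_handle` and a real MUMPS entry point:

```julia
using GridapPETSc                     # runs __init__ and loads libpetsc
using GridapPETSc: libpetsc_handle
using Libdl

# With throw_error=false, dlsym returns `nothing` for a missing symbol
# (e.g. a PETSc build configured without MUMPS) instead of throwing.
sym = Libdl.dlsym(libpetsc_handle[], :MatMumpsSetIcntl; throw_error=false)
if sym === nothing
  # The corresponding wrapper would now fail its @check with
  # "Missing symbol. Re-configure and compile PETSc."
  @info "This PETSc build does not provide MatMumpsSetIcntl"
end
```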
diff --git a/src/PETScArrays.jl b/src/PETScArrays.jl
index 298898e..58232c8 100644
--- a/src/PETScArrays.jl
+++ b/src/PETScArrays.jl
@@ -24,7 +24,7 @@ function Finalize(a::PETScVector)
   if a.comm == MPI.COMM_SELF
     @check_error_code PETSC.VecDestroy(a.vec)
   else
-    @check_error_code PETSC.PetscObjectRegisterDestroy(a.vec[].ptr)
+    @check_error_code PETSC.PetscObjectRegisterDestroy(a.vec[])
   end
   a.initialized = false
   @assert Threads.threadid() == 1
@@ -120,7 +120,7 @@ function _copy!(a::Vector,b::Vec)
   ni = length(a)
   ix = collect(PetscInt,0:(ni-1))
   v = convert(Vector{PetscScalar},a)
-  @check_error_code PETSC.VecGetValues(b.ptr,ni,ix,v)
+  @check_error_code PETSC.VecGetValues(b,ni,ix,v)
   if !(v === a)
     a .= v
   end
@@ -135,12 +135,12 @@ function _copy!(a::Vec,b::Vector)
   ni = length(b)
   ix = collect(PetscInt,0:(ni-1))
   v = convert(Vector{PetscScalar},b)
-  @check_error_code PETSC.VecSetValues(a.ptr,ni,ix,v,PETSC.INSERT_VALUES)
+  @check_error_code PETSC.VecSetValues(a,ni,ix,v,PETSC.INSERT_VALUES)
 end
 
 function _get_local_oh_vector(a::Vec)
   v=PETScVector(MPI.COMM_SELF)
-  @check_error_code PETSC.VecGhostGetLocalForm(a.ptr,v.vec)
+  @check_error_code PETSC.VecGhostGetLocalForm(a,v.vec)
   if v.vec[] != C_NULL # a is a ghosted vector
     v.ownership=a
     Init(v)
@@ -198,7 +198,7 @@ function Finalize(a::PETScMatrix)
   if a.comm == MPI.COMM_SELF
     @check_error_code PETSC.MatDestroy(a.mat)
   else
-    @check_error_code PETSC.PetscObjectRegisterDestroy(a.mat[].ptr)
+    @check_error_code PETSC.PetscObjectRegisterDestroy(a.mat[])
   end
   a.initialized = false
   @assert Threads.threadid() == 1
@@ -308,7 +308,7 @@ function _copy!(petscmat::Mat,mat::Matrix)
   for i=1:size(mat)[1]
     row[1]=PetscInt(i-1)
     vals .= view(mat,i,:)
-    PETSC.MatSetValues(petscmat.ptr,
+    PETSC.MatSetValues(petscmat,
                        PetscInt(1),
                        row,
                        n,
@@ -316,12 +316,46 @@ function _copy!(petscmat::Mat,mat::Matrix)
                        vals,
                        PETSC.INSERT_VALUES)
   end
-  @check_error_code PETSC.MatAssemblyBegin(petscmat.ptr, PETSC.MAT_FINAL_ASSEMBLY)
-  @check_error_code PETSC.MatAssemblyEnd(petscmat.ptr, PETSC.MAT_FINAL_ASSEMBLY)
+  @check_error_code PETSC.MatAssemblyBegin(petscmat, PETSC.MAT_FINAL_ASSEMBLY)
+  @check_error_code PETSC.MatAssemblyEnd(petscmat, PETSC.MAT_FINAL_ASSEMBLY)
+end
+
+function _copy!(petscmat::Mat,mat::AbstractSparseMatrix)
+  Tm = SparseMatrixCSR{0,PetscScalar,PetscInt}
+  csr = convert(Tm,mat)
+  ia = csr.rowptr
+  ja = csr.colval
+  a = csr.nzval
+  m = csr.m
+  n = csr.n
+  maxnnz = maximum( ia[i+1]-ia[i] for i=1:m )
+  row = Vector{PetscInt}(undef,1)
+  cols = Vector{PetscInt}(undef,maxnnz)
+  for i=1:size(mat,1)
+    row[1]=PetscInt(i-1)
+    current=1
+    for j=ia[i]+1:ia[i+1]
+      col=ja[j]+1
+      cols[current]=PetscInt(col-1)
+      current=current+1
+    end
+    vals = view(a,ia[i]+1:ia[i+1])
+    PETSC.MatSetValues(
+      petscmat,
+      PetscInt(1),
+      row,
+      ia[i+1]-ia[i],
+      cols,
+      vals,
+      PETSC.INSERT_VALUES)
+  end
+  @check_error_code PETSC.MatAssemblyBegin(petscmat, PETSC.MAT_FINAL_ASSEMBLY)
+  @check_error_code PETSC.MatAssemblyEnd(petscmat, PETSC.MAT_FINAL_ASSEMBLY)
 end
 
+
 function Base.convert(::Type{PETScMatrix},a::PETScMatrix)
   a
 end
diff --git a/test/PLaplacianTests.jl b/test/PLaplacianTests.jl
index fa306b6..057873c 100644
--- a/test/PLaplacianTests.jl
+++ b/test/PLaplacianTests.jl
@@ -3,6 +3,7 @@
 using Gridap.Algebra
 using GridapDistributed
 using PartitionedArrays
 using GridapPETSc
+using GridapPETSc: PETSC
 using Test
 
@@ -26,17 +27,28 @@
   @check_error_code GridapPETSc.PETSC.MatMumpsSetCntl(mumpsmat[], 3, 1.0e-6)
 end
-
 function main(parts)
-  options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-14 -snes_atol 0.0 -snes_monitor -snes_converged_reason"
+  main(parts,:gmres)
+  if PETSC.MatMumpsSetIcntl_handle[] != C_NULL
+    main(parts,:mumps)
+  end
+end
+function main(parts,solver)
+  if solver == :mumps
+    options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-14 -snes_atol 0.0 -snes_monitor -snes_converged_reason"
+  elseif solver == :gmres
+    options = "-snes_type newtonls -snes_linesearch_type basic -snes_linesearch_damping 1.0 -snes_rtol 1.0e-14 -snes_atol 0.0 -snes_monitor -pc_type jacobi -ksp_type gmres -ksp_monitor -snes_converged_reason"
+  else
+    error()
+  end
   GridapPETSc.with(args=split(options)) do
-    main(parts,FullyAssembledRows())
-    main(parts,SubAssembledRows())
+    main(parts,solver,FullyAssembledRows())
+    main(parts,solver,SubAssembledRows())
   end
 end
 
-function main(parts,strategy)
+function main(parts,solver,strategy)
 
   domain = (0,4,0,4)
   cells = (100,100)
@@ -69,9 +81,13 @@
   # fill!(x,1)
   # @test (norm(A*x-_A*x)+1) ≈ 1
 
-  nls = PETScNonlinearSolver(mysnessetup)
-  solver = FESolver(nls)
-  uh = solve(solver,op)
+  if solver == :mumps
+    nls = PETScNonlinearSolver(mysnessetup)
+  else
+    nls = PETScNonlinearSolver()
+  end
+  fesolver = FESolver(nls)
+  uh = solve(fesolver,op)
 
   Ωo = Triangulation(model)
   dΩo = Measure(Ωo,2*k)
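Both test drivers now assemble directly into PETSc-native storage. A condensed, self-contained sketch of the pattern (mirroring the `PoissonTests.jl` change below; the toy mesh and homogeneous Dirichlet data are placeholders):

```julia
using Gridap
using Gridap.FESpaces
using GridapPETSc: PetscScalar, PetscInt
using SparseMatricesCSR

# Minimal Poisson setup on a 4x4 grid.
model = CartesianDiscreteModel((0,1,0,1),(4,4))
reffe = ReferenceFE(lagrangian,Float64,1)
V = TestFESpace(model,reffe,dirichlet_tags="boundary")
U = TrialFESpace(x->0.0,V)
Ω = Triangulation(model)
dΩ = Measure(Ω,2)
a(u,v) = ∫( ∇(v)⋅∇(u) )dΩ
l(v) = ∫( v*1.0 )dΩ

# Assemble into 0-based CSR with PETSc's scalar and integer types, so the
# matrix can be handed to PETSc without an index/type conversion.
Tm = SparseMatrixCSR{0,PetscScalar,PetscInt}
Tv = Vector{PetscScalar}
assem = SparseMatrixAssembler(Tm,Tv,U,V)
op = AffineFEOperator(a,l,U,V,assem)
```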
diff --git a/test/PoissonTests.jl b/test/PoissonTests.jl
index 3de7ac7..4100a7a 100644
--- a/test/PoissonTests.jl
+++ b/test/PoissonTests.jl
@@ -1,8 +1,10 @@
+using SparseMatricesCSR
 using Gridap
 using Gridap.Algebra
 using Gridap.FESpaces
 using GridapDistributed
 using GridapPETSc
+using GridapPETSc: PETSC
 using PartitionedArrays
 using Test
 
@@ -24,7 +26,20 @@
 end
 
 function main(parts)
-  options = "-info -ksp_error_if_not_converged true"
+  main(parts,:cg)
+  if PETSC.MatMumpsSetIcntl_handle[] != C_NULL
+    main(parts,:mumps)
+  end
+end
+
+function main(parts,solver)
+  if solver == :mumps
+    options = "-info -ksp_error_if_not_converged true"
+  elseif solver == :cg
+    options = "-info -pc_type jacobi -ksp_type cg -ksp_monitor -ksp_rtol 1.0e-12"
+  else
+    error()
+  end
   GridapPETSc.with(args=split(options)) do
     domain = (0,4,0,4)
     cells = (4,4)
@@ -52,9 +67,14 @@
     a(u,v) = ∫( ∇(v)⋅∇(u) )dΩ
     l(v) = ∫( v*f )dΩ + ∫( v*g )dΓn
 
-    op = AffineFEOperator(a,l,U,V)
+    assem=SparseMatrixAssembler(SparseMatrixCSR{0,PetscScalar,PetscInt},Vector{Float64},U,V)
+    op = AffineFEOperator(a,l,U,V,assem)
 
-    ls = PETScLinearSolver(mykspsetup)
+    if solver == :mumps
+      ls = PETScLinearSolver(mykspsetup)
+    else
+      ls = PETScLinearSolver()
+    end
     fels = LinearFESolver(ls)
     uh = solve(fels,op)
     eh = u - uh
diff --git a/test/mpi/mpiexec.jl b/test/mpi/mpiexec.jl
index b3d322b..0eb518c 100644
--- a/test/mpi/mpiexec.jl
+++ b/test/mpi/mpiexec.jl
@@ -5,7 +5,11 @@ function run_mpi_driver(;procs,file)
   testdir = joinpath(mpidir,"..")
   repodir = joinpath(testdir,"..")
   mpiexec() do cmd
-    run(`$cmd -n $procs --allow-run-as-root --oversubscribe $(Base.julia_cmd()) --project=$repodir $(joinpath(mpidir,file))`)
+    if MPI.MPI_LIBRARY == MPI.OpenMPI
+      run(`$cmd -n $procs --allow-run-as-root --oversubscribe $(Base.julia_cmd()) --project=$repodir $(joinpath(mpidir,file))`)
+    else
+      run(`$cmd -n $procs $(Base.julia_cmd()) --project=$repodir $(joinpath(mpidir,file))`)
+    end
     @test true
   end
 end
diff --git a/test/sequential/PETScArraysTests.jl b/test/sequential/PETScArraysTests.jl
index 676ee41..77080d4 100644
--- a/test/sequential/PETScArraysTests.jl
+++ b/test/sequential/PETScArraysTests.jl
@@ -8,7 +8,8 @@ using GridapPETSc: PetscScalar, PetscInt
 using LinearAlgebra
 
 options = "-info"
-GridapPETSc.with(args=split(options)) do
+out_1 = "some output"
+out_2 = GridapPETSc.with(args=split(options)) do
 
   n = 10
   v = PETScVector(n)
@@ -90,6 +91,9 @@
   @test typeof(C*aj) == PETScVector
   @test C*ap == C*aj
 
+  out_1
 end
 
+@test out_1 === out_2
+
 end # module
diff --git a/test/sequential/PLaplacianDriver.jl b/test/sequential/PLaplacianDriver.jl
new file mode 100644
index 0000000..df0a5f1
--- /dev/null
+++ b/test/sequential/PLaplacianDriver.jl
@@ -0,0 +1,56 @@
+module PLaplacianDriver
+
+using Test
+using Gridap
+using Gridap.FESpaces
+using GridapPETSc
+using GridapPETSc: PetscScalar, PetscInt
+using SparseArrays
+using SparseMatricesCSR
+
+options = "-snes_type newtonls -snes_rtol 1.0e-14 -snes_atol 0.0 -snes_monitor -ksp_converged_reason -pc_type ilu -ksp_type gmres -snes_converged_reason"
+
+msg = "some output"
+
+out = GridapPETSc.with(args=split(options)) do
+
+  domain = (0,4,0,4)
+  cells = (100,100)
+  model = CartesianDiscreteModel(domain,cells)
+
+  k = 1
+  u((x,y)) = (x+y)^k
+  σ(∇u) =(1.0+∇u⋅∇u)*∇u
+  dσ(∇du,∇u) = 2*∇u⋅∇du + (1.0+∇u⋅∇u)*∇du
+  f(x) = -divergence(y->σ(∇(u,y)),x)
+
+  Ω = Triangulation(model)
+  dΩ = Measure(Ω,2*k)
+  r(u,v) = ∫( ∇(v)⋅(σ∘∇(u)) - v*f )dΩ
+  j(u,du,v) = ∫( ∇(v)⋅(dσ∘(∇(du),∇(u))) )dΩ
+
+  reffe = ReferenceFE(lagrangian,Float64,k)
+  V = TestFESpace(model,reffe,dirichlet_tags="boundary")
+  U = TrialFESpace(u,V)
+
+  nls = PETScNonlinearSolver()
+
+  op = FEOperator(r,j,U,V)
+  uh = solve(nls,op)
+  eh = u - uh
+  @test sqrt(sum(∫( abs2(eh) )dΩ)) < 1.0e-9
+
+  Tm = SparseMatrixCSR{0,PetscScalar,PetscInt}
+  Tv = Vector{PetscScalar}
+  assem = SparseMatrixAssembler(Tm,Tv,U,V)
+  op = FEOperator(r,j,U,V,assem)
+  uh = solve(nls,op)
+  eh = u - uh
+  @test sqrt(sum(∫( abs2(eh) )dΩ)) < 1.0e-9
+
+  msg
+end
+
+@test out === msg
+
+end # module
diff --git a/test/sequential/runtests.jl b/test/sequential/runtests.jl
index 29ed044..38796a8 100644
--- a/test/sequential/runtests.jl
+++ b/test/sequential/runtests.jl
@@ -18,6 +18,8 @@ using Test
 
 @time @testset "ElasticityDriver" begin include("ElasticityDriver.jl") end
 
+@time @testset "PLaplacianDriver" begin include("PLaplacianDriver.jl") end
+
 end # module
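For reference, the property the new drivers assert: since `with` in `src/Environment.jl` now returns the closure's result, values computed inside the PETSc session remain available after `Finalize`. A minimal sketch:

```julia
using GridapPETSc

# The do-block's last expression is returned by `with`.
flag = GridapPETSc.with(args=split("-info")) do
  v = PETScVector(10)
  length(v) == 10
end
@assert flag
```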