From ed335b7b5606a9fba890b5070929d6bdbb760cd3 Mon Sep 17 00:00:00 2001 From: amartin Date: Thu, 11 Nov 2021 20:10:51 +1100 Subject: [PATCH 01/13] Moving snes Petsc object to Nonlinearsolver PETSc --- src/PETScNonlinearSolvers.jl | 52 ++++++++++++++---------------------- 1 file changed, 20 insertions(+), 32 deletions(-) diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index 4a0eb4c..5321fc6 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -1,12 +1,12 @@ -mutable struct PETScNonlinearSolver <: NonlinearSolver +mutable struct PETScNonlinearSolver{F} <: NonlinearSolver + setup::F comm::MPI.Comm - snes::Ref{SNES} - initialized::Bool end mutable struct PETScNonlinearSolverCache{A,B,C,D} initialized::Bool + snes::Ref{SNES} op::NonlinearOperator # Julia LA data structures @@ -21,13 +21,15 @@ mutable struct PETScNonlinearSolverCache{A,B,C,D} jac_petsc_mat_A::D jac_petsc_mat_P::D - function PETScNonlinearSolverCache(op::NonlinearOperator,x_julia_vec::A,res_julia_vec::A, + function PETScNonlinearSolverCache(snes::Ref{SNES}, op::NonlinearOperator, + x_julia_vec::A,res_julia_vec::A, jac_julia_mat_A::B,jac_julia_mat_P::B, x_petsc_vec::C,res_petsc_vec::C, jac_petsc_mat_A::D,jac_petsc_mat_P::D) where {A,B,C,D} - cache=new{A,B,C,D}(true,op,x_julia_vec,res_julia_vec,jac_julia_mat_A,jac_julia_mat_P, - x_petsc_vec,res_petsc_vec,jac_petsc_mat_A,jac_petsc_mat_P) + cache=new{A,B,C,D}(true, snes, op, + x_julia_vec, res_julia_vec, jac_julia_mat_A, jac_julia_mat_P, + x_petsc_vec, res_petsc_vec, jac_petsc_mat_A, jac_petsc_mat_P) finalizer(Finalize,cache) end end @@ -71,27 +73,6 @@ function snes_jacobian(csnes:: Ptr{Cvoid}, return PetscInt(0) end -function PETScNonlinearSolver(setup,comm) - @assert Threads.threadid() == 1 - _NREFS[] += 1 - snes_ref=Ref{SNES}() - @check_error_code PETSC.SNESCreate(comm,snes_ref) - setup(snes_ref) - snes=PETScNonlinearSolver(comm,snes_ref,true) - finalizer(Finalize, snes) - snes -end - -function Finalize(nls::PETScNonlinearSolver) - if GridapPETSc.Initialized() && nls.initialized - @check_error_code PETSC.SNESDestroy(nls.snes) - @assert Threads.threadid() == 1 - nls.initialized=false - _NREFS[] -= 1 - end - nothing -end - function Finalize(cache::PETScNonlinearSolverCache) if GridapPETSc.Initialized() && cache.initialized GridapPETSc.Finalize(cache.x_petsc_vec) @@ -100,6 +81,7 @@ function Finalize(cache::PETScNonlinearSolverCache) if !(cache.jac_petsc_mat_P === cache.jac_petsc_mat_A) GridapPETSc.Finalize(cache.jac_petsc_mat_P) end + @check_error_code PETSC.SNESDestroy(cache.snes) cache.initialized=false end end @@ -116,16 +98,17 @@ PETScNonlinearSolver() = PETScNonlinearSolver(MPI.COMM_WORLD) function _set_petsc_residual_function!(nls::PETScNonlinearSolver, cache) ctx = pointer_from_objref(cache) fptr = @cfunction(snes_residual, PetscInt, (Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid})) - PETSC.SNESSetFunction(nls.snes[],cache.res_petsc_vec.vec[],fptr,ctx) + PETSC.SNESSetFunction(cache.snes[],cache.res_petsc_vec.vec[],fptr,ctx) end function _set_petsc_jacobian_function!(nls::PETScNonlinearSolver, cache) ctx = pointer_from_objref(cache) fptr = @cfunction(snes_jacobian, PetscInt, (Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid},Ptr{Cvoid})) - PETSC.SNESSetJacobian(nls.snes[],cache.jac_petsc_mat_A.mat[],cache.jac_petsc_mat_A.mat[],fptr,ctx) + PETSC.SNESSetJacobian(cache.snes[],cache.jac_petsc_mat_A.mat[],cache.jac_petsc_mat_A.mat[],fptr,ctx) end function _setup_cache(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearOperator) + 
res_julia_vec, jac_julia_mat_A = residual_and_jacobian(op,x) res_petsc_vec = convert(PETScVector,res_julia_vec) jac_petsc_mat_A = convert(PETScMatrix,jac_julia_mat_A) @@ -143,7 +126,12 @@ function _setup_cache(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearO x_julia_vec = similar(res_julia_vec,eltype(res_julia_vec),(axes(jac_julia_mat_A)[2],)) copy!(x_julia_vec,x) x_petsc_vec = convert(PETScVector,x_julia_vec) - PETScNonlinearSolverCache(op, x_julia_vec,res_julia_vec, + + snes_ref=Ref{SNES}() + @check_error_code PETSC.SNESCreate(nls.comm,snes_ref) + nls.setup(snes_ref) + + PETScNonlinearSolverCache(snes_ref, op, x_julia_vec,res_julia_vec, jac_julia_mat_A,jac_julia_mat_A, x_petsc_vec,res_petsc_vec, jac_petsc_mat_A, jac_petsc_mat_A) @@ -171,7 +159,7 @@ function Algebra.solve!(x::T, cache::PETScNonlinearSolverCache{<:T}) where T <: AbstractVector @assert cache.op === op - @check_error_code PETSC.SNESSolve(nls.snes[],C_NULL,cache.x_petsc_vec.vec[]) + @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc_vec.vec[]) _copy_and_exchange!(x,cache.x_petsc_vec) cache end @@ -180,7 +168,7 @@ function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::Nonlinea cache=_setup_cache(x,nls,op) _set_petsc_residual_function!(nls,cache) _set_petsc_jacobian_function!(nls,cache) - @check_error_code PETSC.SNESSolve(nls.snes[],C_NULL,cache.x_petsc_vec.vec[]) + @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc_vec.vec[]) _copy_and_exchange!(x,cache.x_petsc_vec) cache end From 7ca0c733fc3f17355f33a0345881f057ece9804c Mon Sep 17 00:00:00 2001 From: amartin Date: Thu, 11 Nov 2021 20:18:54 +1100 Subject: [PATCH 02/13] Added ::Nothing to the solve! function for nonlinear petsc solver --- src/PETScNonlinearSolvers.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index 5321fc6..54bf83c 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -164,7 +164,7 @@ function Algebra.solve!(x::T, cache end -function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearOperator) +function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearOperator,::Nothing) cache=_setup_cache(x,nls,op) _set_petsc_residual_function!(nls,cache) _set_petsc_jacobian_function!(nls,cache) From 3f47610cbb33adb248982311ae86e40a08a624e5 Mon Sep 17 00:00:00 2001 From: amartin Date: Thu, 11 Nov 2021 20:23:24 +1100 Subject: [PATCH 03/13] removing old finalize call --- test/sequential/PETScNonlinearSolversTests.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/test/sequential/PETScNonlinearSolversTests.jl b/test/sequential/PETScNonlinearSolversTests.jl index 4374fda..c8b97a9 100644 --- a/test/sequential/PETScNonlinearSolversTests.jl +++ b/test/sequential/PETScNonlinearSolversTests.jl @@ -22,7 +22,6 @@ x0 = zero_initial_guess(op) cache = solve!(x0,nls,op) GridapPETSc.Finalize(cache) -GridapPETSc.Finalize(nls) GridapPETSc.Finalize() end # module From 2254650e8b8e3ba746a05fca5bbf11e19bd03a9c Mon Sep 17 00:00:00 2001 From: amartin Date: Fri, 12 Nov 2021 18:54:48 +1100 Subject: [PATCH 04/13] Keeping a reference to input vector x to snes.solve! with FE space layout in the nonlinear solver cache so that we avoid extra copies in residual! and jacobian! 
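For context, a minimal sketch (not part of the diff below; field names follow the cache introduced in this patch, and it mirrors the snes_residual callback added here) of the callback flow that the cached FE-space-layout vector enables, so that no fresh FE-space vector is allocated on every SNES residual/jacobian call:

  # sketch only: reuse the cached vectors instead of allocating per callback
  function residual_callback_sketch(cache, cx, cfx)
    copy!(cache.x_sys_layout, Vec(cx))                  # PETSc x -> system-layout Julia vector
    copy!(cache.x_fe_space_layout, cache.x_sys_layout)  # system layout -> cached FE-space-layout vector
    residual!(cache.res_sys_layout, cache.op, cache.x_fe_space_layout)
    copy!(Vec(cfx), cache.res_sys_layout)               # Julia residual -> PETSc residual
    return PetscInt(0)
  end
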
--- src/PETScNonlinearSolvers.jl | 101 +++++++++++++++++++---------------- 1 file changed, 56 insertions(+), 45 deletions(-) diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index 54bf83c..f818a88 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -4,32 +4,39 @@ mutable struct PETScNonlinearSolver{F} <: NonlinearSolver comm::MPI.Comm end -mutable struct PETScNonlinearSolverCache{A,B,C,D} +mutable struct PETScNonlinearSolverCache{A,B,C,D,E} initialized::Bool snes::Ref{SNES} op::NonlinearOperator + # The input vector to solve! + x_fe_space_layout::A + # Julia LA data structures - x_julia_vec::A - res_julia_vec::A - jac_julia_mat_A::B - jac_julia_mat_P::B + x_sys_layout::B + res_sys_layout::B + jac_mat_A::C + jac_mat_P::C # Counterpart PETSc data structures - x_petsc_vec::C - res_petsc_vec::C - jac_petsc_mat_A::D - jac_petsc_mat_P::D + x_petsc::D + res_petsc::D + jac_petsc_mat_A::E + jac_petsc_mat_P::E function PETScNonlinearSolverCache(snes::Ref{SNES}, op::NonlinearOperator, - x_julia_vec::A,res_julia_vec::A, - jac_julia_mat_A::B,jac_julia_mat_P::B, - x_petsc_vec::C,res_petsc_vec::C, - jac_petsc_mat_A::D,jac_petsc_mat_P::D) where {A,B,C,D} - - cache=new{A,B,C,D}(true, snes, op, - x_julia_vec, res_julia_vec, jac_julia_mat_A, jac_julia_mat_P, - x_petsc_vec, res_petsc_vec, jac_petsc_mat_A, jac_petsc_mat_P) + x_fe_space_layout::A, + x_sys_layout::B, res_sys_layout::B, + jac_mat_A::C, jac_mat_P::C, + x_petsc::D, res_petsc::D, + jac_petsc_mat_A::E, jac_petsc_mat_P::E) where {A,B,C,D,E} + cache=new{A,B,C,D,E}(true, + snes, op, + x_fe_space_layout, + x_sys_layout, res_sys_layout, + jac_mat_A, jac_mat_P, + x_petsc, res_petsc, + jac_petsc_mat_A, jac_petsc_mat_P) finalizer(Finalize,cache) end end @@ -40,14 +47,17 @@ function snes_residual(csnes::Ptr{Cvoid}, ctx::Ptr{Cvoid})::PetscInt cache = unsafe_pointer_to_objref(ctx) + println("residual") + # 1. Transfer cx to Julia data structures - copy!(cache.x_julia_vec, Vec(cx)) + copy!(cache.x_sys_layout, Vec(cx)) + copy!(cache.x_fe_space_layout,cache.x_sys_layout) # 2. Evaluate residual into Julia data structures - residual!(cache.res_julia_vec, cache.op, cache.x_julia_vec) + residual!(cache.res_sys_layout, cache.op, cache.x_fe_space_layout) # 3. Transfer Julia residual to PETSc residual (cfx) - copy!(Vec(cfx), cache.res_julia_vec) + copy!(Vec(cfx), cache.res_sys_layout) return PetscInt(0) end @@ -62,21 +72,22 @@ function snes_jacobian(csnes:: Ptr{Cvoid}, # 1. Transfer cx to Julia data structures # Extract pointer to array of values out of cx and put it in a PVector - copy!(cache.x_julia_vec, Vec(cx)) + copy!(cache.x_sys_layout, Vec(cx)) + copy!(cache.x_fe_space_layout,cache.x_sys_layout) - # 2. - jacobian!(cache.jac_julia_mat_A,cache.op,cache.x_julia_vec) + # 2. Evaluate Jacobian into Julia data structures + jacobian!(cache.jac_mat_A,cache.op,cache.x_fe_space_layout) - # 3. Transfer nls.jac_julia_mat_A/P to PETSc (cA/cP) - copy!(Mat(cA), cache.jac_julia_mat_A) + # 3. 
Transfer nls.jac_mat_A/P to PETSc (cA/cP) + copy!(Mat(cA), cache.jac_mat_A) return PetscInt(0) end function Finalize(cache::PETScNonlinearSolverCache) if GridapPETSc.Initialized() && cache.initialized - GridapPETSc.Finalize(cache.x_petsc_vec) - GridapPETSc.Finalize(cache.res_petsc_vec) + GridapPETSc.Finalize(cache.x_petsc) + GridapPETSc.Finalize(cache.res_petsc) GridapPETSc.Finalize(cache.jac_petsc_mat_A) if !(cache.jac_petsc_mat_P === cache.jac_petsc_mat_A) GridapPETSc.Finalize(cache.jac_petsc_mat_P) @@ -98,7 +109,7 @@ PETScNonlinearSolver() = PETScNonlinearSolver(MPI.COMM_WORLD) function _set_petsc_residual_function!(nls::PETScNonlinearSolver, cache) ctx = pointer_from_objref(cache) fptr = @cfunction(snes_residual, PetscInt, (Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid})) - PETSC.SNESSetFunction(cache.snes[],cache.res_petsc_vec.vec[],fptr,ctx) + PETSC.SNESSetFunction(cache.snes[],cache.res_petsc.vec[],fptr,ctx) end function _set_petsc_jacobian_function!(nls::PETScNonlinearSolver, cache) @@ -109,31 +120,31 @@ end function _setup_cache(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearOperator) - res_julia_vec, jac_julia_mat_A = residual_and_jacobian(op,x) - res_petsc_vec = convert(PETScVector,res_julia_vec) - jac_petsc_mat_A = convert(PETScMatrix,jac_julia_mat_A) + res_sys_layout, jac_mat_A = residual_and_jacobian(op,x) + res_petsc = convert(PETScVector,res_sys_layout) + jac_petsc_mat_A = convert(PETScMatrix,jac_mat_A) # In a parallel MPI context, x is a vector with a data layout typically different from - # the one of res_julia_vec. On the one hand, x holds the free dof values of a FE + # the one of res_sys_layout. On the one hand, x holds the free dof values of a FE # Function, and thus has the data layout of the FE space (i.e., local DOFs # include all DOFs touched by local cells, i.e., owned and ghost cells). - # On the other hand, res_petsc_vec has the data layout of the rows of the + # On the other hand, res_petsc has the data layout of the rows of the # distributed linear system (e.g., local DoFs only include those touched from owned # cells/facets during assembly, assuming the SubAssembledRows strategy). - # The following lines of code generate a version of x, namely, x_julia_vec, with the - # same data layout as the columns of jac_julia_mat_A, but the contents of x + # The following lines of code generate a version of x, namely, x_sys_layout, with the + # same data layout as the columns of jac_mat_A, but the contents of x # (for the owned dof values). 
- x_julia_vec = similar(res_julia_vec,eltype(res_julia_vec),(axes(jac_julia_mat_A)[2],)) - copy!(x_julia_vec,x) - x_petsc_vec = convert(PETScVector,x_julia_vec) + x_sys_layout = similar(res_sys_layout,eltype(res_sys_layout),(axes(jac_mat_A)[2],)) + copy!(x_sys_layout,x) + x_petsc = convert(PETScVector,x_sys_layout) snes_ref=Ref{SNES}() @check_error_code PETSC.SNESCreate(nls.comm,snes_ref) nls.setup(snes_ref) - PETScNonlinearSolverCache(snes_ref, op, x_julia_vec,res_julia_vec, - jac_julia_mat_A,jac_julia_mat_A, - x_petsc_vec,res_petsc_vec, + PETScNonlinearSolverCache(snes_ref, op, x, x_sys_layout, res_sys_layout, + jac_mat_A, jac_mat_A, + x_petsc, res_petsc, jac_petsc_mat_A, jac_petsc_mat_A) end @@ -159,8 +170,8 @@ function Algebra.solve!(x::T, cache::PETScNonlinearSolverCache{<:T}) where T <: AbstractVector @assert cache.op === op - @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc_vec.vec[]) - _copy_and_exchange!(x,cache.x_petsc_vec) + @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc.vec[]) + _copy_and_exchange!(x,cache.x_petsc) cache end @@ -168,7 +179,7 @@ function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::Nonlinea cache=_setup_cache(x,nls,op) _set_petsc_residual_function!(nls,cache) _set_petsc_jacobian_function!(nls,cache) - @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc_vec.vec[]) - _copy_and_exchange!(x,cache.x_petsc_vec) + @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc.vec[]) + _copy_and_exchange!(x,cache.x_petsc) cache end From fc17101b584ba7d8f085aae2318972edaee93a86 Mon Sep 17 00:00:00 2001 From: amartin Date: Fri, 12 Nov 2021 22:28:39 +1100 Subject: [PATCH 05/13] Reimplemented copy! for petsvec and abstract vectors combination. The previous variant was causing a nasty error message with petsc built in debug mode. 
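For reference, the reworked copy! goes through VecGetValues/VecSetValues with an explicit zero-based index array instead of grabbing the raw local array, which is what the debug-mode locked-vector check in the trace below complains about. A minimal sketch of the read direction (it mirrors the code added in this patch):

  # sketch: copy a PETSc Vec `b` into a Julia vector `a` by values
  ni = length(a)
  ix = collect(PetscInt, 0:(ni-1))            # PETSc uses 0-based indices
  v  = convert(Vector{PetscScalar}, a)
  @check_error_code PETSC.VecGetValues(b.ptr, ni, ix, v)
  v === a || (a .= v)                         # copy back if convert allocated

The error that motivated the change follows: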
[0]PETSC ERROR: #3 VecSetErrorIfLocked() at /home/amartin/software_installers/petsc-3.15.4-build-dbg-gnu9-mpi/include/petscvec.h:574 [0]PETSC ERROR: #4 VecGetArray() at /home/amartin/software_installers/petsc-3.15.4-build-dbg-gnu9-mpi/src/vec/vec/interface/rvector.c:1795 PETScNonLinearSolvers: Error During Test at /home/amartin/git-repos/GridapPETSc.jl/test/sequential/runtests.jl:13 Got exception outside of a @test LoadError: Petsc returned with error code: 73 Stacktrace: [1] macro expansion @ ~/git-repos/GridapPETSc.jl/src/Config.jl:88 [inlined] [2] get_local_vector(a::GridapPETSc.PETScVector) @ GridapPETSc ~/git-repos/GridapPETSc.jl/src/PETScArrays.jl:176 [3] copy!(vec::Vector{Float64}, petscvec::GridapPETSc.PETScVector) @ GridapPETSc ~/git-repos/GridapPETSc.jl/src/PETScArrays.jl:124 [4] copy!(a::Vector{Float64}, petsc_vec::GridapPETSc.PETSC.Vec) @ GridapPETSc ~/git-repos/GridapPETSc.jl/src/PETScArrays.jl:112 [5] snes_residual(csnes::Ptr{Nothing}, cx::Ptr{Nothing}, cfx::Ptr{Nothing}, ctx::Ptr{Nothing}) @ GridapPETSc ~/git-repos/GridapPETSc.jl/src/PETScNonlinearSolvers.jl:42 [6] SNESSolve @ ~/git-repos/GridapPETSc.jl/src/PETSC.jl:62 [inlined] [7] macro expansion @ ~/git-repos/GridapPETSc.jl/src/Config.jl:86 [inlined] [8] solve!(x::Vector{Float64}, nls::GridapPETSc.PETScNonlinearSolver, op::Gridap.Algebra.NonlinearOperatorMock) @ GridapPETSc ~/git-repos/GridapPETSc.jl/src/PETScNonlinearSolvers.jl:183 [9] test_nonlinear_solver(nls::GridapPETSc.PETScNonlinearSolver, op::Gridap.Algebra.NonlinearOperatorMock, x0::Vector{Float64}, x::Vector{Float64}, pred::typeof(isapprox)) @ Gridap.Algebra ~/.julia/packages/Gridap/AfDIn/src/Algebra/NonlinearSolvers.jl:62 [10] test_nonlinear_solver(nls::GridapPETSc.PETScNonlinearSolver, op::Gridap.Algebra.NonlinearOperatorMock, x0::Vector{Float64}, x::Vector{Float64}) @ Gridap.Algebra ~/.julia/packages/Gridap/AfDIn/src/Algebra/NonlinearSolvers.jl:61 [11] top-level scope @ ~/git-repos/GridapPETSc.jl/test/sequential/PETScNonlinearSolversTests.jl:15 [12] include(mod::Module, _path::String) @ Base ./Base.jl:386 [13] include @ ~/git-repos/GridapPETSc.jl/test/sequential/runtests.jl:1 [inlined] [14] macro expansion @ ~/git-repos/GridapPETSc.jl/test/sequential/runtests.jl:13 [inlined] [15] macro expansion @ /buildworker/worker/package_linux64/build/usr/share/julia/stdlib/v1.6/Test/src/Test.jl:1151 [inlined] [16] macro expansion @ ~/git-repos/GridapPETSc.jl/test/sequential/runtests.jl:13 [inlined] [17] top-level scope @ ./timing.jl:210 [inlined] [18] top-level scope @ ~/git-repos/GridapPETSc.jl/test/sequential/runtests.jl:0 [19] include(fname::String) @ Base.MainInclude ./client.jl:444 [20] top-level scope @ REPL[3]:1 [21] eval @ ./boot.jl:360 [inlined] [22] eval @ ./Base.jl:39 [inlined] [23] repleval(m::Module, code::Expr, #unused#::String) @ VSCodeServer ~/.vscode/extensions/julialang.language-julia-1.4.3/scripts/packages/VSCodeServer/src/repl.jl:157 [24] (::VSCodeServer.var"#69#71"{Module, Expr, REPL.LineEditREPL, REPL.LineEdit.Prompt})() @ VSCodeServer ~/.vscode/extensions/julialang.language-julia-1.4.3/scripts/packages/VSCodeServer/src/repl.jl:123 [25] with_logstate(f::Function, logstate::Any) @ Base.CoreLogging ./logging.jl:491 [26] with_logger @ ./logging.jl:603 [inlined] [27] (::VSCodeServer.var"#68#70"{Module, Expr, REPL.LineEditREPL, REPL.LineEdit.Prompt})() @ VSCodeServer ~/.vscode/extensions/julialang.language-julia-1.4.3/scripts/packages/VSCodeServer/src/repl.jl:124 [28] #invokelatest#2 @ ./essentials.jl:708 [inlined] [29] invokelatest(::Any) @ Base 
./essentials.jl:706 [30] macro expansion @ ~/.vscode/extensions/julialang.language-julia-1.4.3/scripts/packages/VSCodeServer/src/eval.jl:34 [inlined] [31] (::VSCodeServer.var"#53#54")() @ VSCodeServer ./task.jl:411 in expression starting at /home/amartin/git-repos/GridapPETSc.jl/test/sequential/PETScNonlinearSolversTests.jl:1 Test Summary: | Error Total PETScNonLinearSolvers | 1 1 ERROR: LoadError: Some tests did not pass: 0 passed, 0 failed, 1 errored, 0 broken. in expression starting at /home/amartin/git-repos/GridapPETSc.jl/test/sequential/runtests.jl:1 --- Manifest.toml | 82 +++++++++++++++++++----------------- src/PETScArrays.jl | 50 ++++++---------------- src/PETScNonlinearSolvers.jl | 2 - 3 files changed, 56 insertions(+), 78 deletions(-) diff --git a/Manifest.toml b/Manifest.toml index 74f1fa0..082ee85 100644 --- a/Manifest.toml +++ b/Manifest.toml @@ -10,9 +10,9 @@ uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" [[ArrayInterface]] deps = ["Compat", "IfElse", "LinearAlgebra", "Requires", "SparseArrays", "Static"] -git-tree-sha1 = "a8101545d6b15ff1ebc927e877e28b0ab4bc4f16" +git-tree-sha1 = "e527b258413e0c6d4f66ade574744c94edef81f8" uuid = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9" -version = "3.1.36" +version = "3.1.40" [[ArrayLayouts]] deps = ["FillArrays", "LinearAlgebra", "SparseArrays"] @@ -39,9 +39,15 @@ version = "0.16.9" [[ChainRulesCore]] deps = ["Compat", "LinearAlgebra", "SparseArrays"] -git-tree-sha1 = "3533f5a691e60601fe60c90d8bc47a27aa2907ec" +git-tree-sha1 = "f885e7e7c124f8c92650d61b9477b9ac2ee607dd" uuid = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" -version = "1.11.0" +version = "1.11.1" + +[[ChangesOfVariables]] +deps = ["LinearAlgebra", "Test"] +git-tree-sha1 = "9a1d594397670492219635b35a3d830b04730d62" +uuid = "9e997f8a-9a97-42d5-a9f1-ce6bfc15e2c0" +version = "0.1.1" [[CodecZlib]] deps = ["TranscodingStreams", "Zlib_jll"] @@ -62,9 +68,9 @@ version = "0.3.0" [[Compat]] deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "SHA", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"] -git-tree-sha1 = "31d0151f5716b655421d9d75b7fa74cc4e744df2" +git-tree-sha1 = "dce3e3fea680869eaa0b774b2e8343e9ff442313" uuid = "34da2185-b29b-5c13-b0c7-acf172513d20" -version = "3.39.0" +version = "3.40.0" [[CompilerSupportLibraries_jll]] deps = ["Artifacts", "Libdl"] @@ -91,16 +97,16 @@ uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5" version = "1.0.3" [[DiffRules]] -deps = ["NaNMath", "Random", "SpecialFunctions"] -git-tree-sha1 = "7220bc21c33e990c14f4a9a319b1d242ebc5b269" +deps = ["LogExpFunctions", "NaNMath", "Random", "SpecialFunctions"] +git-tree-sha1 = "3287dacf67c3652d3fed09f4c12c187ae4dbb89a" uuid = "b552c78f-8df3-52c6-915a-8e097449b14b" -version = "1.3.1" +version = "1.4.0" [[Distances]] deps = ["LinearAlgebra", "Statistics", "StatsAPI"] -git-tree-sha1 = "09d9eaef9ef719d2cd5d928a191dc95be2ec8059" +git-tree-sha1 = "837c83e5574582e07662bbbba733964ff7c26b9d" uuid = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" -version = "0.10.5" +version = "0.10.6" [[Distributed]] deps = ["Random", "Serialization", "Sockets"] @@ -124,9 +130,9 @@ version = "0.4.7" [[FileIO]] deps = ["Pkg", "Requires", "UUIDs"] -git-tree-sha1 = "3c041d2ac0a52a12a27af2782b34900d9c3ee68c" +git-tree-sha1 = "2db648b6712831ecb333eae76dbfd1c156ca13bb" uuid = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" -version = "1.11.1" +version = "1.11.2" [[FillArrays]] deps = ["LinearAlgebra", 
"Random", "SparseArrays", "Statistics"] @@ -141,29 +147,29 @@ uuid = "6a86dc24-6348-571c-b903-95158fe2bd41" version = "2.8.1" [[ForwardDiff]] -deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "LinearAlgebra", "NaNMath", "Preferences", "Printf", "Random", "SpecialFunctions", "StaticArrays"] -git-tree-sha1 = "63777916efbcb0ab6173d09a658fb7f2783de485" +deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "LinearAlgebra", "LogExpFunctions", "NaNMath", "Preferences", "Printf", "Random", "SpecialFunctions", "StaticArrays"] +git-tree-sha1 = "6406b5112809c08b1baa5703ad274e1dded0652f" uuid = "f6369f11-7733-5829-9624-2563aa707210" -version = "0.10.21" +version = "0.10.23" [[Gridap]] deps = ["AbstractTrees", "BSON", "BlockArrays", "Combinatorics", "DocStringExtensions", "FastGaussQuadrature", "FileIO", "FillArrays", "ForwardDiff", "JLD2", "JSON", "LineSearches", "LinearAlgebra", "NLsolve", "NearestNeighbors", "QuadGK", "Random", "SparseArrays", "SparseMatricesCSR", "StaticArrays", "Test", "WriteVTK"] -git-tree-sha1 = "db94b3f4e47315b2b9c4ccc6d54aff32829051dd" +git-tree-sha1 = "7619ecc085cc378d5ce7e173dfdec363feb76d23" uuid = "56d4f2e9-7ea1-5844-9cf6-b9c51ca7ce8e" -version = "0.17.3" +version = "0.17.5" [[GridapDistributed]] deps = ["FillArrays", "Gridap", "LinearAlgebra", "MPI", "PartitionedArrays", "SparseArrays", "WriteVTK"] -git-tree-sha1 = "ecca67da64453f9f838615ae84f9e840acf44296" +git-tree-sha1 = "712274a12c745b84be465bd5165b12d3d51bd955" repo-rev = "master" repo-url = "https://github.com/gridap/GridapDistributed.jl" uuid = "f9701e48-63b3-45aa-9a63-9bc6c271f355" version = "0.2.0" [[IfElse]] -git-tree-sha1 = "28e837ff3e7a6c3cdb252ce49fb412c8eb3caeef" +git-tree-sha1 = "debdd00ffef04665ccbb3e150747a77560e8fad1" uuid = "615f187c-cbe4-4ef1-ba3b-2fcf58d6d173" -version = "0.1.0" +version = "0.1.1" [[InteractiveUtils]] deps = ["Markdown"] @@ -171,9 +177,9 @@ uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[InverseFunctions]] deps = ["Test"] -git-tree-sha1 = "f0c6489b12d28fb4c2103073ec7452f3423bd308" +git-tree-sha1 = "a7254c0acd8e62f1ac75ad24d5db43f5f19f3c65" uuid = "3587e190-3f89-42d0-90ee-14403ec27112" -version = "0.1.1" +version = "0.1.2" [[IrrationalConstants]] git-tree-sha1 = "7fd44fd4ff43fc60815f8e764c0f352b83c49151" @@ -182,9 +188,9 @@ version = "0.1.1" [[IterativeSolvers]] deps = ["LinearAlgebra", "Printf", "Random", "RecipesBase", "SparseArrays"] -git-tree-sha1 = "1a8c6237e78b714e901e406c096fc8a65528af7d" +git-tree-sha1 = "1169632f425f79429f245113b775a0e3d121457c" uuid = "42fd0dbc-a981-5370-80f2-aaf504508153" -version = "0.9.1" +version = "0.9.2" [[JLD2]] deps = ["DataStructures", "FileIO", "MacroTools", "Mmap", "Pkg", "Printf", "Reexport", "TranscodingStreams", "UUIDs"] @@ -246,10 +252,10 @@ deps = ["Libdl"] uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" [[LogExpFunctions]] -deps = ["ChainRulesCore", "DocStringExtensions", "InverseFunctions", "IrrationalConstants", "LinearAlgebra"] -git-tree-sha1 = "6193c3815f13ba1b78a51ce391db8be016ae9214" +deps = ["ChainRulesCore", "ChangesOfVariables", "DocStringExtensions", "InverseFunctions", "IrrationalConstants", "LinearAlgebra"] +git-tree-sha1 = "be9eef9f9d78cecb6f262f3c10da151a6c5ab827" uuid = "2ab3a3ac-af41-5b50-aa03-7779005ae688" -version = "0.3.4" +version = "0.3.5" [[Logging]] uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" @@ -268,9 +274,9 @@ version = "3.4.2+1" [[MacroTools]] deps = ["Markdown", "Random"] -git-tree-sha1 = "5a5bc6bf062f0f95e62d0fe0a2d99699fed82dd9" +git-tree-sha1 = "3d3e902b31198a27340d0bf00d6ac452866021cf" uuid = 
"1914dd2f-81c6-5fcd-8719-6d5c9610ff09" -version = "0.5.8" +version = "0.5.9" [[Markdown]] deps = ["Base64"] @@ -294,9 +300,9 @@ uuid = "14a3606d-f60d-562e-9121-12d972cd8159" [[NLSolversBase]] deps = ["DiffResults", "Distributed", "FiniteDiff", "ForwardDiff"] -git-tree-sha1 = "144bab5b1443545bc4e791536c9f1eacb4eed06a" +git-tree-sha1 = "50310f934e55e5ca3912fb941dec199b49ca9b68" uuid = "d41bc354-129a-5804-8e4c-c37616107c6c" -version = "7.8.1" +version = "7.8.2" [[NLsolve]] deps = ["Distances", "LineSearches", "LinearAlgebra", "NLSolversBase", "Printf", "Reexport"] @@ -359,15 +365,15 @@ version = "0.12.3" [[Parsers]] deps = ["Dates"] -git-tree-sha1 = "f19e978f81eca5fd7620650d7dbea58f825802ee" +git-tree-sha1 = "ae4bbcadb2906ccc085cf52ac286dc1377dceccc" uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" -version = "2.1.0" +version = "2.1.2" [[PartitionedArrays]] deps = ["Distances", "IterativeSolvers", "LinearAlgebra", "MPI", "Printf", "SparseArrays", "SparseMatricesCSR"] -git-tree-sha1 = "fc308afeda353c9e8f8413b7cfe944615defa853" +git-tree-sha1 = "d67ba9b3b6c4ff0024c6e5d9aa500f0e3a40cba2" uuid = "5a9dfac6-5c52-46f7-8278-5e2210713be9" -version = "0.2.4" +version = "0.2.7" [[Pkg]] deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] @@ -499,9 +505,9 @@ uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" [[WriteVTK]] deps = ["Base64", "CodecZlib", "FillArrays", "LightXML", "TranscodingStreams"] -git-tree-sha1 = "3c6dd58eac1c7941b853790edf30dbfe8a6f57df" +git-tree-sha1 = "4642a7b953ed9f7f043bfba58b17d6c9722d09b2" uuid = "64499a7a-5c06-52f2-abe2-ccb03c286192" -version = "1.12.0" +version = "1.12.2" [[XML2_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Libiconv_jll", "Pkg", "Zlib_jll"] diff --git a/src/PETScArrays.jl b/src/PETScArrays.jl index f050eb7..b8c5086 100644 --- a/src/PETScArrays.jl +++ b/src/PETScArrays.jl @@ -106,50 +106,24 @@ function Base.convert(::Type{PETScVector},a::AbstractVector) PETScVector(array) end -function Base.copy!(a::AbstractVector,petsc_vec::Vec) - aux=PETScVector() - aux.vec[] = petsc_vec.ptr - Base.copy!(a,aux) -end - -function Base.copy!(petsc_vec::Vec,a::AbstractVector) - aux=PETScVector() - aux.vec[] = petsc_vec.ptr - Base.copy!(aux,a) -end -function Base.copy!(vec::AbstractVector,petscvec::PETScVector) - lg=get_local_oh_vector(petscvec) - if isa(lg,PETScVector) # petscvec is a ghosted vector - lx=get_local_vector(lg) - @assert length(lx)==length(vec) - vec .= lx - restore_local_vector!(lx,lg) - GridapPETSc.Finalize(lg) - else # petscvec is NOT a ghosted vector - @assert length(lg)==length(vec) - vec .= lg - restore_local_vector!(lg,petscvec) +function Base.copy!(vec::AbstractVector,petscvec::Vec) + ni = length(vec) + ix = collect(PetscInt,0:(ni-1)) + v = convert(Vector{PetscScalar},vec) + @check_error_code PETSC.VecGetValues(petscvec.ptr,ni,ix,v) + if !(v === vec) + vec .= v end end -function Base.copy!(petscvec::PETScVector,vec::AbstractVector) - lg=get_local_oh_vector(petscvec) - if isa(lg,PETScVector) # petscvec is a ghosted vector - lx=get_local_vector(lg) - @assert length(lx)==length(vec) - lx .= vec - restore_local_vector!(lx,lg) - GridapPETSc.Finalize(lg) - else # petscvec is NOT a ghosted vector - @assert length(lg)==length(vec) - lg .= vec - restore_local_vector!(lg,petscvec) - end +function Base.copy!(petscvec::Vec,vec::AbstractVector) + ni = length(vec) + ix = collect(PetscInt,0:(ni-1)) + v = convert(Vector{PetscScalar},vec) + 
@check_error_code PETSC.VecSetValues(petscvec.ptr,ni,ix,v,PETSC.INSERT_VALUES) end - - function get_local_oh_vector(a::PETScVector) v=PETScVector() @check_error_code PETSC.VecGhostGetLocalForm(a.vec[],v.vec) diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index f818a88..f366d7d 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -47,8 +47,6 @@ function snes_residual(csnes::Ptr{Cvoid}, ctx::Ptr{Cvoid})::PetscInt cache = unsafe_pointer_to_objref(ctx) - println("residual") - # 1. Transfer cx to Julia data structures copy!(cache.x_sys_layout, Vec(cx)) copy!(cache.x_fe_space_layout,cache.x_sys_layout) From 0379aa105f471bf85d68aeb9ec03b4e174de92f8 Mon Sep 17 00:00:00 2001 From: amartin Date: Fri, 12 Nov 2021 22:55:10 +1100 Subject: [PATCH 06/13] Removing _set_petsc_XXX_function(s) --- src/PETScNonlinearSolvers.jl | 12 ++++++++++-- src/PartitionedArrays.jl | 12 ++++++++++++ 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index f366d7d..838ee3f 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -175,8 +175,16 @@ end function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearOperator,::Nothing) cache=_setup_cache(x,nls,op) - _set_petsc_residual_function!(nls,cache) - _set_petsc_jacobian_function!(nls,cache) + + # set petsc residual function + ctx = pointer_from_objref(cache) + fptr = @cfunction(snes_residual, PetscInt, (Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid})) + PETSC.SNESSetFunction(cache.snes[],cache.res_petsc.vec[],fptr,ctx) + + # set petsc jacobian function + fptr = @cfunction(snes_jacobian, PetscInt, (Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid}, Ptr{Cvoid},Ptr{Cvoid})) + PETSC.SNESSetJacobian(cache.snes[],cache.jac_petsc_mat_A.mat[],cache.jac_petsc_mat_A.mat[],fptr,ctx) + @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc.vec[]) _copy_and_exchange!(x,cache.x_petsc) cache diff --git a/src/PartitionedArrays.jl b/src/PartitionedArrays.jl index ca4c0a1..a617f1c 100644 --- a/src/PartitionedArrays.jl +++ b/src/PartitionedArrays.jl @@ -82,6 +82,18 @@ function PartitionedArrays.PVector(v::PETScVector,ids::PRange,::MPIBackend) PVector(values,ids) end +function Base.copy!(a::PVector,petsc_vec::Vec) + aux=PETScVector() + aux.vec[] = petsc_vec.ptr + Base.copy!(a,aux) +end + +function Base.copy!(petsc_vec::Vec,a::PVector) + aux=PETScVector() + aux.vec[] = petsc_vec.ptr + Base.copy!(aux,a) +end + function Base.copy!(pvec::PVector,petscvec::PETScVector) if get_backend(pvec.values) == mpi From 9b21f583d4a59c98192ec1a6b65c44b2ca257916 Mon Sep 17 00:00:00 2001 From: amartin Date: Fri, 12 Nov 2021 23:40:40 +1100 Subject: [PATCH 07/13] Removing exchange! 
after solve --- src/PETScLinearSolvers.jl | 2 +- src/PETScNonlinearSolvers.jl | 20 ++------------------ 2 files changed, 3 insertions(+), 19 deletions(-) diff --git a/src/PETScLinearSolvers.jl b/src/PETScLinearSolvers.jl index b79b007..babc0c9 100644 --- a/src/PETScLinearSolvers.jl +++ b/src/PETScLinearSolvers.jl @@ -83,7 +83,7 @@ function Algebra.solve!(x::PVector,ns::PETScLinearSolverNS,b::PVector) copy!(X,x) Y = convert(PETScVector,X) solve!(Y,ns,b) - _copy_and_exchange!(x,Y) + copy!(x,Y) end diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index 838ee3f..dc3d1b1 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -146,22 +146,6 @@ function _setup_cache(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearO jac_petsc_mat_A, jac_petsc_mat_A) end -# Helper private functions to implement the solve! methods below. -# It allows to execute the solve! methods below in a serial context, i.e., -# whenever -function _myexchange!(x::AbstractVector) - x -end -function _myexchange!(x::PVector) - exchange!(x) -end - -function _copy_and_exchange!(a::AbstractVector,b::PETScVector) - copy!(a,b.vec[]) - _myexchange!(a) -end - - function Algebra.solve!(x::T, nls::PETScNonlinearSolver, op::NonlinearOperator, @@ -169,7 +153,7 @@ function Algebra.solve!(x::T, @assert cache.op === op @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc.vec[]) - _copy_and_exchange!(x,cache.x_petsc) + copy!(x,cache.x_petsc) cache end @@ -186,6 +170,6 @@ function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::Nonlinea PETSC.SNESSetJacobian(cache.snes[],cache.jac_petsc_mat_A.mat[],cache.jac_petsc_mat_A.mat[],fptr,ctx) @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc.vec[]) - _copy_and_exchange!(x,cache.x_petsc) + copy!(x,cache.x_petsc) cache end From 1500d6320e6da62b74311fb902a610d09810e2ef Mon Sep 17 00:00:00 2001 From: amartin Date: Sat, 13 Nov 2021 15:43:37 +1100 Subject: [PATCH 08/13] Fixing a BUG in convert AbstractMatrix to PETScMatrix --- src/PETScArrays.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/PETScArrays.jl b/src/PETScArrays.jl index b8c5086..2b44155 100644 --- a/src/PETScArrays.jl +++ b/src/PETScArrays.jl @@ -322,7 +322,7 @@ function Base.convert(::Type{PETScMatrix}, a::AbstractMatrix{PetscScalar}) j = [PetscInt(j-1) for i=1:m for j=1:n] v = [ a[i,j] for i=1:m for j=1:n] A = PETScMatrix() - A.ownership = a + A.ownership = (i,j,v) @check_error_code PETSC.MatCreateSeqAIJWithArrays(MPI.COMM_SELF,m,n,i,j,v,A.mat) @check_error_code PETSC.MatAssemblyBegin(A.mat[],PETSC.MAT_FINAL_ASSEMBLY) @check_error_code PETSC.MatAssemblyEnd(A.mat[],PETSC.MAT_FINAL_ASSEMBLY) From 5a7b90a2bb7924bc77776c6bc72e133014abbc8c Mon Sep 17 00:00:00 2001 From: amartin Date: Sat, 13 Nov 2021 16:14:16 +1100 Subject: [PATCH 09/13] Fix to avoid error in PETSc when compiled in debug mode (base) amartin@sistemas-ThinkPad-X1-Carbon-6th:~/git-repos/GridapPETSc.jl$ julia --project=. 
test/sequential/PETScAssemblyTests.jl [0] PetscDetermineInitialFPTrap(): Floating point trapping is off by default 0 [0] PetscInitialize(): PETSc successfully started: number of processors = 1 [0] PetscGetHostName(): Rejecting domainname, likely is NIS sistemas-ThinkPad-X1-Carbon-6th.(none) [0] PetscInitialize(): Running on machine: sistemas-ThinkPad-X1-Carbon-6th [0] PetscCommDuplicate(): Duplicating a communicator 140230383909504 25901472 max tags = 2147483647 [0]PETSC ERROR: --------------------- Error Message -------------------------------------------------------------- [0]PETSC ERROR: Argument out of range [0]PETSC ERROR: nnz cannot be greater than row length: local row 0 value 5 rowlength 3 [0]PETSC ERROR: See https://petsc.org/release/faq/ for trouble shooting. [0]PETSC ERROR: Petsc Release Version 3.15.4, Sep 01, 2021 [0]PETSC ERROR: GridapPETSc on a x86_64 named sistemas-ThinkPad-X1-Carbon-6th by amartin Fri Nov 12 23:58:38 2021 [0]PETSC ERROR: Configure options --with-cc=mpicc --with-cxx=mpicxx --with-fc=mpif90 -with-blaslapack-dir=/opt/intel/compilers_and_libraries_2020.0.166/linux/mkl --download-mumps --download-scalapack --download-parmetis --download-metis --download-ptscotch --with-debugging --with-x=0 --with-shared-libraries=1 --with-mpi=1 --with-64-bit-indices [0]PETSC ERROR: #1 MatSeqAIJSetPreallocation_SeqAIJ() at /home/amartin/software_installers/petsc-3.15.4-build-dbg-gnu9-mpi/src/mat/impls/aij/seq/aij.c:4101 [0]PETSC ERROR: #2 MatCreateSeqAIJ() at /home/amartin/software_installers/petsc-3.15.4-build-dbg-gnu9-mpi/src/mat/impls/aij/seq/aij.c:4015 ERROR: LoadError: Petsc returned with error code: 63 Stacktrace: [1] macro expansion @ ~/git-repos/GridapPETSc.jl/src/Config.jl:88 [inlined] [2] nz_allocation(a::GridapPETSc.MatCounter{Gridap.Algebra.Loop}) @ GridapPETSc ~/git-repos/GridapPETSc.jl/src/PETScAssembly.jl:178 [3] (::Main.PETScAssemblyTests.var"#1#2")() @ Main.PETScAssemblyTests ~/git-repos/GridapPETSc.jl/test/sequential/PETScAssemblyTests.jl:28 [4] with(f::Main.PETScAssemblyTests.var"#1#2"; kwargs::Base.Iterators.Pairs{Symbol, Vector{SubString{String}}, Tuple{Symbol}, NamedTuple{(:args,), Tuple{Vector{SubString{String}}}}}) @ GridapPETSc ~/git-repos/GridapPETSc.jl/src/Environment.jl:38 [5] top-level scope @ ~/git-repos/GridapPETSc.jl/test/sequential/PETScAssemblyTests.jl:14 in expression starting at /home/amartin/git-repos/GridapPETSc.jl/test/sequential/PETScAssemblyTests.jl:1 [0] PetscFinalize(): PetscFinalize() called --- src/PETScAssembly.jl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/PETScAssembly.jl b/src/PETScAssembly.jl index 6799d94..9b5c27e 100644 --- a/src/PETScAssembly.jl +++ b/src/PETScAssembly.jl @@ -173,7 +173,9 @@ function Algebra.nz_allocation(a::MatCounter) m = a.nrows n = a.ncols nz = PETSC.PETSC_DEFAULT - nnz = a.rownnzmax + # nnz cannot be larger than the number of columns + # Otherwise PETSc complains when compiled in DEBUG mode + nnz = broadcast(min,a.rownnzmax,n) b = PETScMatrix() @check_error_code PETSC.MatCreateSeqAIJ(comm,m,n,nz,nnz,b.mat) Init(b) From 9ad836ce5c0081c30017c34bd47ba7fa5a1cc3d1 Mon Sep 17 00:00:00 2001 From: amartin Date: Sat, 13 Nov 2021 16:53:07 +1100 Subject: [PATCH 10/13] Ensure that the result of the broadcast is of type PETScInt --- src/PETScAssembly.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/PETScAssembly.jl b/src/PETScAssembly.jl index 9b5c27e..514a3c7 100644 --- a/src/PETScAssembly.jl +++ b/src/PETScAssembly.jl @@ -175,7 +175,7 @@ function 
Algebra.nz_allocation(a::MatCounter) nz = PETSC.PETSC_DEFAULT # nnz cannot be larger than the number of columns # Otherwise PETSc complains when compiled in DEBUG mode - nnz = broadcast(min,a.rownnzmax,n) + nnz = broadcast(min,a.rownnzmax,PetscInt(n)) b = PETScMatrix() @check_error_code PETSC.MatCreateSeqAIJ(comm,m,n,nz,nnz,b.mat) Init(b) From 758a12d8de3e4e338046c42ebb8c09d5300cfc9c Mon Sep 17 00:00:00 2001 From: amartin Date: Sat, 13 Nov 2021 17:41:17 +1100 Subject: [PATCH 11/13] Revert "Removing exchange! after solve" This reverts commit 9b21f583d4a59c98192ec1a6b65c44b2ca257916. --- src/PETScLinearSolvers.jl | 2 +- src/PETScNonlinearSolvers.jl | 20 ++++++++++++++++++-- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/src/PETScLinearSolvers.jl b/src/PETScLinearSolvers.jl index babc0c9..b79b007 100644 --- a/src/PETScLinearSolvers.jl +++ b/src/PETScLinearSolvers.jl @@ -83,7 +83,7 @@ function Algebra.solve!(x::PVector,ns::PETScLinearSolverNS,b::PVector) copy!(X,x) Y = convert(PETScVector,X) solve!(Y,ns,b) - copy!(x,Y) + _copy_and_exchange!(x,Y) end diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index dc3d1b1..838ee3f 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -146,6 +146,22 @@ function _setup_cache(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearO jac_petsc_mat_A, jac_petsc_mat_A) end +# Helper private functions to implement the solve! methods below. +# It allows to execute the solve! methods below in a serial context, i.e., +# whenever +function _myexchange!(x::AbstractVector) + x +end +function _myexchange!(x::PVector) + exchange!(x) +end + +function _copy_and_exchange!(a::AbstractVector,b::PETScVector) + copy!(a,b.vec[]) + _myexchange!(a) +end + + function Algebra.solve!(x::T, nls::PETScNonlinearSolver, op::NonlinearOperator, @@ -153,7 +169,7 @@ function Algebra.solve!(x::T, @assert cache.op === op @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc.vec[]) - copy!(x,cache.x_petsc) + _copy_and_exchange!(x,cache.x_petsc) cache end @@ -170,6 +186,6 @@ function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::Nonlinea PETSC.SNESSetJacobian(cache.snes[],cache.jac_petsc_mat_A.mat[],cache.jac_petsc_mat_A.mat[],fptr,ctx) @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc.vec[]) - copy!(x,cache.x_petsc) + _copy_and_exchange!(x,cache.x_petsc) cache end From d2f566f77fd173d0ad939cca2b96c4ffd6de22ed Mon Sep 17 00:00:00 2001 From: amartin Date: Tue, 23 Nov 2021 13:23:33 +1100 Subject: [PATCH 12/13] Reorganized/cleaned Base.copy! 
methods among PETSc and Julia objects --- Manifest.toml | 2 - Project.toml | 1 - src/PETScArrays.jl | 48 ++++---- src/PETScNonlinearSolvers.jl | 10 +- src/PartitionedArrays.jl | 174 +++++++++++++++------------- test/sequential/ElasticityDriver.jl | 6 +- 6 files changed, 131 insertions(+), 110 deletions(-) diff --git a/Manifest.toml b/Manifest.toml index 082ee85..e47fa82 100644 --- a/Manifest.toml +++ b/Manifest.toml @@ -161,8 +161,6 @@ version = "0.17.5" [[GridapDistributed]] deps = ["FillArrays", "Gridap", "LinearAlgebra", "MPI", "PartitionedArrays", "SparseArrays", "WriteVTK"] git-tree-sha1 = "712274a12c745b84be465bd5165b12d3d51bd955" -repo-rev = "master" -repo-url = "https://github.com/gridap/GridapDistributed.jl" uuid = "f9701e48-63b3-45aa-9a63-9bc6c271f355" version = "0.2.0" diff --git a/Project.toml b/Project.toml index 5c536a6..aa79dce 100644 --- a/Project.toml +++ b/Project.toml @@ -16,7 +16,6 @@ SparseMatricesCSR = "a0a7dd2c-ebf4-11e9-1f05-cf50bc540ca1" [compat] Gridap = "0.17" -GridapDistributed = "0.2.0" MPI = "0.14, 0.15, 0.16, 0.17, 0.18, 0.19" PETSc_jll = "3.13" PartitionedArrays = "0.2.4" diff --git a/src/PETScArrays.jl b/src/PETScArrays.jl index 2b44155..f8562e0 100644 --- a/src/PETScArrays.jl +++ b/src/PETScArrays.jl @@ -106,27 +106,36 @@ function Base.convert(::Type{PETScVector},a::AbstractVector) PETScVector(array) end +function Base.copy!(a::AbstractVector,b::PETScVector) + @check length(a) == length(b) + _copy!(a,b.vec[]) +end -function Base.copy!(vec::AbstractVector,petscvec::Vec) - ni = length(vec) +function _copy!(a::Vector,b::Vec) + ni = length(a) ix = collect(PetscInt,0:(ni-1)) - v = convert(Vector{PetscScalar},vec) - @check_error_code PETSC.VecGetValues(petscvec.ptr,ni,ix,v) - if !(v === vec) - vec .= v + v = convert(Vector{PetscScalar},a) + @check_error_code PETSC.VecGetValues(b.ptr,ni,ix,v) + if !(v === a) + a .= v end end -function Base.copy!(petscvec::Vec,vec::AbstractVector) - ni = length(vec) +function Base.copy!(a::PETScVector,b::AbstractVector) + @check length(a) == length(b) + _copy!(a.vec[],b) +end + +function _copy!(a::Vec,b::AbstractVector) + ni = length(b) ix = collect(PetscInt,0:(ni-1)) - v = convert(Vector{PetscScalar},vec) - @check_error_code PETSC.VecSetValues(petscvec.ptr,ni,ix,v,PETSC.INSERT_VALUES) + v = convert(Vector{PetscScalar},b) + @check_error_code PETSC.VecSetValues(a.ptr,ni,ix,v,PETSC.INSERT_VALUES) end -function get_local_oh_vector(a::PETScVector) +function get_local_oh_vector(a::Vec) v=PETScVector() - @check_error_code PETSC.VecGhostGetLocalForm(a.vec[],v.vec) + @check_error_code PETSC.VecGhostGetLocalForm(a.ptr,v.vec) if v.vec[] != C_NULL # a is a ghosted vector v.ownership=a Init(v) @@ -277,14 +286,11 @@ function Base.copy(a::PETScMatrix) Init(v) end -function Base.copy!(petscmat::Mat,a::AbstractMatrix) - aux=PETScMatrix() - aux.mat[] = petscmat.ptr - Base.copy!(aux,a) +function Base.copy!(a::PETScMatrix,b::AbstractMatrix) + _copy!(a.mat[],b) end - -function Base.copy!(petscmat::PETScMatrix,mat::AbstractMatrix) +function _copy!(petscmat::Mat,mat::Matrix) n = size(mat)[2] cols = [PetscInt(j-1) for j=1:n] row = Vector{PetscInt}(undef,1) @@ -292,7 +298,7 @@ function Base.copy!(petscmat::PETScMatrix,mat::AbstractMatrix) for i=1:size(mat)[1] row[1]=PetscInt(i-1) vals .= view(mat,i,:) - PETSC.MatSetValues(petscmat.mat[], + PETSC.MatSetValues(petscmat.ptr, PetscInt(1), row, n, @@ -300,8 +306,8 @@ function Base.copy!(petscmat::PETScMatrix,mat::AbstractMatrix) vals, PETSC.INSERT_VALUES) end - @check_error_code 
PETSC.MatAssemblyBegin(petscmat.mat[], PETSC.MAT_FINAL_ASSEMBLY) - @check_error_code PETSC.MatAssemblyEnd(petscmat.mat[] , PETSC.MAT_FINAL_ASSEMBLY) + @check_error_code PETSC.MatAssemblyBegin(petscmat.ptr, PETSC.MAT_FINAL_ASSEMBLY) + @check_error_code PETSC.MatAssemblyEnd(petscmat.ptr, PETSC.MAT_FINAL_ASSEMBLY) end diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index 838ee3f..cadbb3b 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -48,14 +48,14 @@ function snes_residual(csnes::Ptr{Cvoid}, cache = unsafe_pointer_to_objref(ctx) # 1. Transfer cx to Julia data structures - copy!(cache.x_sys_layout, Vec(cx)) + _copy!(cache.x_sys_layout, Vec(cx)) copy!(cache.x_fe_space_layout,cache.x_sys_layout) # 2. Evaluate residual into Julia data structures residual!(cache.res_sys_layout, cache.op, cache.x_fe_space_layout) # 3. Transfer Julia residual to PETSc residual (cfx) - copy!(Vec(cfx), cache.res_sys_layout) + _copy!(Vec(cfx), cache.res_sys_layout) return PetscInt(0) end @@ -70,14 +70,14 @@ function snes_jacobian(csnes:: Ptr{Cvoid}, # 1. Transfer cx to Julia data structures # Extract pointer to array of values out of cx and put it in a PVector - copy!(cache.x_sys_layout, Vec(cx)) + _copy!(cache.x_sys_layout, Vec(cx)) copy!(cache.x_fe_space_layout,cache.x_sys_layout) # 2. Evaluate Jacobian into Julia data structures jacobian!(cache.jac_mat_A,cache.op,cache.x_fe_space_layout) # 3. Transfer nls.jac_mat_A/P to PETSc (cA/cP) - copy!(Mat(cA), cache.jac_mat_A) + _copy!(Mat(cA), cache.jac_mat_A) return PetscInt(0) end @@ -157,7 +157,7 @@ function _myexchange!(x::PVector) end function _copy_and_exchange!(a::AbstractVector,b::PETScVector) - copy!(a,b.vec[]) + copy!(a,b) _myexchange!(a) end diff --git a/src/PartitionedArrays.jl b/src/PartitionedArrays.jl index a617f1c..f4edb00 100644 --- a/src/PartitionedArrays.jl +++ b/src/PartitionedArrays.jl @@ -82,86 +82,64 @@ function PartitionedArrays.PVector(v::PETScVector,ids::PRange,::MPIBackend) PVector(values,ids) end -function Base.copy!(a::PVector,petsc_vec::Vec) - aux=PETScVector() - aux.vec[] = petsc_vec.ptr - Base.copy!(a,aux) -end - -function Base.copy!(petsc_vec::Vec,a::PVector) - aux=PETScVector() - aux.vec[] = petsc_vec.ptr - Base.copy!(aux,a) -end +_copy!(a::PVector{<:SequentialData},b::Vec) = @notimplemented +_copy!(a::Vec,b::PVector{<:SequentialData}) = @notimplemented +function _copy!(pvec::PVector{<:MPIData},petscvec::Vec) + map_parts(get_part_ids(pvec.values),pvec.values,pvec.rows.partition) do part, values, indices + lg=get_local_oh_vector(petscvec) + if (isa(lg,PETScVector)) # petsc_vec is a ghosted vector + # Only copying owned DoFs. This should be followed by + # an exchange if the ghost DoFs of pvec are to be consumed. + # We are assuming here that the layout of pvec and petsvec + # are compatible. We do not have any information about the + # layout of petscvec to check this out. We decided NOT to + # convert petscvec into a PVector to avoid extra memory allocation + # and copies. 
+ @assert pvec.rows.ghost + lx=get_local_vector(lg) + vvalues=view(values,indices.oid_to_lid) + vvalues .= lx[1:num_oids(indices)] + restore_local_vector!(lx,lg) + GridapPETSc.Finalize(lg) + else # petsc_vec is NOT a ghosted vector + # @assert !pvec.rows.ghost + # @assert length(lg)==length(values) + # values .= lg + # restore_local_vector!(petscvec,lg) -function Base.copy!(pvec::PVector,petscvec::PETScVector) - if get_backend(pvec.values) == mpi - map_parts(get_part_ids(pvec.values),pvec.values,pvec.rows.partition) do part, values, indices - lg=get_local_oh_vector(petscvec) - if (isa(lg,PETScVector)) # petsc_vec is a ghosted vector - # Only copying owned DoFs. This should be followed by - # an exchange if the ghost DoFs of pvec are to be consumed. - # We are assuming here that the layout of pvec and petsvec - # are compatible. We do not have any information about the - # layout of petscvec to check this out. We decided NOT to - # convert petscvec into a PVector to avoid extra memory allocation - # and copies. - @assert pvec.rows.ghost - lx=get_local_vector(lg) - vvalues=view(values,indices.oid_to_lid) - vvalues .= lx[1:num_oids(indices)] - restore_local_vector!(lx,lg) - GridapPETSc.Finalize(lg) - else # petsc_vec is NOT a ghosted vector - # @assert !pvec.rows.ghost - # @assert length(lg)==length(values) - # values .= lg - # restore_local_vector!(petscvec,lg) - - # If am not wrong, the code should never enter here. At least - # given how it is being leveraged at present from GridapPETsc. - # If in the future we need this case, the commented lines of - # code above could serve the purpose (not tested). - @notimplemented - end - end - elseif get_backend(pvec.values) == sequential - # I left this as an exercise to the interested. - @notimplemented + # If am not wrong, the code should never enter here. At least + # given how it is being leveraged at present from GridapPETsc. + # If in the future we need this case, the commented lines of + # code above could serve the purpose (not tested). + @notimplemented + end end pvec end -function Base.copy!(petscvec::PETScVector,pvec::PVector) - if get_backend(pvec.values) == mpi - map_parts(pvec.values,pvec.rows.partition) do values, indices - @check isa(indices,IndexRange) "Unsupported partition for PETSc vectors" - lg=get_local_oh_vector(petscvec) - if (isa(lg,PETScVector)) # petscvec is a ghosted vector - lx=get_local_vector(lg) - # Only copying owned DoFs. This should be followed by - # an exchange if the ghost DoFs of petscvec are to be consumed. - # We are assuming here that the layout of pvec and petsvec - # are compatible. We do not have any information about the - # layout of petscvec to check this out. - lx[1:num_oids(indices)] .= values[1:num_oids(indices)] - restore_local_vector!(lx,lg) - GridapPETSc.Finalize(lg) - else - # @assert !pvec.rows.ghost - # @assert length(lg)==length(values) - # lg .= values - # restore_local_vector!(lg,petscvec) - # See comment in the function copy! right above - @notimplemented - end - end - elseif get_backend(pvec.values) == sequential - # I leave this as an exercise to the interested. Essentially the - # same approach as in Base.copy!(petscvec::PETScMatrix,pvec::PSparseMatrix) - # has to be followed. 
- @notimplemented +function Base.copy!(petscvec::Vec,pvec::PVector{<:MPIData}) + map_parts(pvec.values,pvec.rows.partition) do values, indices + @check isa(indices,IndexRange) "Unsupported partition for PETSc vectors" + lg=get_local_oh_vector(petscvec) + if (isa(lg,PETScVector)) # petscvec is a ghosted vector + lx=get_local_vector(lg) + # Only copying owned DoFs. This should be followed by + # an exchange if the ghost DoFs of petscvec are to be consumed. + # We are assuming here that the layout of pvec and petsvec + # are compatible. We do not have any information about the + # layout of petscvec to check this out. + lx[1:num_oids(indices)] .= values[1:num_oids(indices)] + restore_local_vector!(lx,lg) + GridapPETSc.Finalize(lg) + else + # @assert !pvec.rows.ghost + # @assert length(lg)==length(values) + # lg .= values + # restore_local_vector!(lg,petscvec) + # See comment in the function copy! right above + @notimplemented + end end petscvec end @@ -209,7 +187,47 @@ function _petsc_matrix(a::PSparseMatrix,::MPIBackend) b end -function Base.copy!(petscmat::PETScMatrix,mat::PSparseMatrix) +function _copy!(petscmat::Mat,mat::PSparseMatrix{<:SequentialData}) + parts=get_part_ids(mat.values) + map_parts(parts, mat.values,mat.rows.partition,mat.cols.partition) do part, lmat, rdofs, cdofs + @check isa(rdofs,IndexRange) "Not supported partition for PETSc matrices" + @check isa(cdofs,IndexRange) "Not supported partition for PETSc matrices" + Tm = SparseMatrixCSR{0,PetscScalar,PetscInt} + csr = convert(Tm,lmat) + ia = csr.rowptr + ja = csr.colval + a = csr.nzval + m = csr.m + n = csr.n + maxnnz = maximum( ia[i+1]-ia[i] for i=1:m ) + row = Vector{PetscInt}(undef,1) + cols = Vector{PetscInt}(undef,maxnnz) + for i=1:num_oids(rdofs) + lid=rdofs.oid_to_lid[i] + row[1]=PetscInt(rdofs.lid_to_gid[lid]-1) + current=1 + for j=ia[lid]+1:ia[lid+1] + col=ja[j]+1 + cols[current]=PetscInt(cdofs.lid_to_gid[col]-1) + current=current+1 + end + vals = view(a,ia[lid]+1:ia[lid+1]) + PETSC.MatSetValues(petscmat.ptr, + PetscInt(1), + row, + ia[lid+1]-ia[lid], + cols, + vals, + PETSC.INSERT_VALUES) + end + end + @check_error_code PETSC.MatAssemblyBegin(petscmat.ptr, PETSC.MAT_FINAL_ASSEMBLY) + @check_error_code PETSC.MatAssemblyEnd(petscmat.ptr, PETSC.MAT_FINAL_ASSEMBLY) +end + +_copy!(::PSparseMatrix{<:SequentialData},::Mat) = @notimplemented +_copy!(::PSparseMatrix{<:MPIData},::Mat) = @notimplemented +function Base.copy!(petscmat::Mat,mat::PSparseMatrix{<:MPIData}) parts=get_part_ids(mat.values) map_parts(parts, mat.values,mat.rows.partition,mat.cols.partition) do part, lmat, rdofs, cdofs @check isa(rdofs,IndexRange) "Not supported partition for PETSc matrices" @@ -234,7 +252,7 @@ function Base.copy!(petscmat::PETScMatrix,mat::PSparseMatrix) current=current+1 end vals = view(a,ia[lid]+1:ia[lid+1]) - PETSC.MatSetValues(petscmat.mat[], + PETSC.MatSetValues(petscmat.ptr, PetscInt(1), row, ia[lid+1]-ia[lid], @@ -243,6 +261,6 @@ function Base.copy!(petscmat::PETScMatrix,mat::PSparseMatrix) PETSC.INSERT_VALUES) end end - @check_error_code PETSC.MatAssemblyBegin(petscmat.mat[], PETSC.MAT_FINAL_ASSEMBLY) - @check_error_code PETSC.MatAssemblyEnd(petscmat.mat[], PETSC.MAT_FINAL_ASSEMBLY) + @check_error_code PETSC.MatAssemblyBegin(petscmat.ptr, PETSC.MAT_FINAL_ASSEMBLY) + @check_error_code PETSC.MatAssemblyEnd(petscmat.ptr, PETSC.MAT_FINAL_ASSEMBLY) end diff --git a/test/sequential/ElasticityDriver.jl b/test/sequential/ElasticityDriver.jl index cbee5a7..1a6d561 100644 --- a/test/sequential/ElasticityDriver.jl +++ 
b/test/sequential/ElasticityDriver.jl @@ -76,13 +76,13 @@ GridapPETSc.with(args=split(options)) do has_constant = PETSC.PETSC_TRUE vecs = map(i->i.vec[],ns_vecs) nvecs = length(vecs) - @check_error_code PETSC.MatNullSpaceCreate(comm,has_constant,nvecs,vecs,nulls) - @check_error_code PETSC.MatSetNearNullSpace(A.mat[],nulls[]) + #@check_error_code PETSC.MatNullSpaceCreate(comm,has_constant,nvecs,vecs,nulls) + #@check_error_code PETSC.MatSetNearNullSpace(A.mat[],nulls[]) solver = PETScLinearSolver() solve!(x,solver,A,b) - @check_error_code PETSC.MatNullSpaceDestroy(nulls) + #@check_error_code PETSC.MatNullSpaceDestroy(nulls) r = A*x-b @test norm(r) < tol From d906f635c9863c7d3c347f48549ce72f78c62d19 Mon Sep 17 00:00:00 2001 From: amartin Date: Tue, 23 Nov 2021 13:33:46 +1100 Subject: [PATCH 13/13] Revert "Revert "Removing exchange! after solve"" This reverts commit 758a12d8de3e4e338046c42ebb8c09d5300cfc9c. --- src/PETScLinearSolvers.jl | 2 +- src/PETScNonlinearSolvers.jl | 20 ++------------------ src/PartitionedArrays.jl | 4 ++-- 3 files changed, 5 insertions(+), 21 deletions(-) diff --git a/src/PETScLinearSolvers.jl b/src/PETScLinearSolvers.jl index b79b007..babc0c9 100644 --- a/src/PETScLinearSolvers.jl +++ b/src/PETScLinearSolvers.jl @@ -83,7 +83,7 @@ function Algebra.solve!(x::PVector,ns::PETScLinearSolverNS,b::PVector) copy!(X,x) Y = convert(PETScVector,X) solve!(Y,ns,b) - _copy_and_exchange!(x,Y) + copy!(x,Y) end diff --git a/src/PETScNonlinearSolvers.jl b/src/PETScNonlinearSolvers.jl index cadbb3b..41676a6 100644 --- a/src/PETScNonlinearSolvers.jl +++ b/src/PETScNonlinearSolvers.jl @@ -146,22 +146,6 @@ function _setup_cache(x::AbstractVector,nls::PETScNonlinearSolver,op::NonlinearO jac_petsc_mat_A, jac_petsc_mat_A) end -# Helper private functions to implement the solve! methods below. -# It allows to execute the solve! 
methods below in a serial context, i.e., -# whenever -function _myexchange!(x::AbstractVector) - x -end -function _myexchange!(x::PVector) - exchange!(x) -end - -function _copy_and_exchange!(a::AbstractVector,b::PETScVector) - copy!(a,b) - _myexchange!(a) -end - - function Algebra.solve!(x::T, nls::PETScNonlinearSolver, op::NonlinearOperator, @@ -169,7 +153,7 @@ function Algebra.solve!(x::T, @assert cache.op === op @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc.vec[]) - _copy_and_exchange!(x,cache.x_petsc) + copy!(x,cache.x_petsc) cache end @@ -186,6 +170,6 @@ function Algebra.solve!(x::AbstractVector,nls::PETScNonlinearSolver,op::Nonlinea PETSC.SNESSetJacobian(cache.snes[],cache.jac_petsc_mat_A.mat[],cache.jac_petsc_mat_A.mat[],fptr,ctx) @check_error_code PETSC.SNESSolve(cache.snes[],C_NULL,cache.x_petsc.vec[]) - _copy_and_exchange!(x,cache.x_petsc) + copy!(x,cache.x_petsc) cache end diff --git a/src/PartitionedArrays.jl b/src/PartitionedArrays.jl index f4edb00..c77329b 100644 --- a/src/PartitionedArrays.jl +++ b/src/PartitionedArrays.jl @@ -187,7 +187,7 @@ function _petsc_matrix(a::PSparseMatrix,::MPIBackend) b end -function _copy!(petscmat::Mat,mat::PSparseMatrix{<:SequentialData}) +function _copy!(petscmat::Mat,mat::PSparseMatrix{T,<:SequentialData}) where {T} parts=get_part_ids(mat.values) map_parts(parts, mat.values,mat.rows.partition,mat.cols.partition) do part, lmat, rdofs, cdofs @check isa(rdofs,IndexRange) "Not supported partition for PETSc matrices" @@ -225,7 +225,7 @@ function _copy!(petscmat::Mat,mat::PSparseMatrix{<:SequentialData}) @check_error_code PETSC.MatAssemblyEnd(petscmat.ptr, PETSC.MAT_FINAL_ASSEMBLY) end -_copy!(::PSparseMatrix{<:SequentialData},::Mat) = @notimplemented +_copy!(::PSparseMatrix{T,<:SequentialData},::Mat) where T = @notimplemented _copy!(::PSparseMatrix{<:MPIData},::Mat) = @notimplemented function Base.copy!(petscmat::Mat,mat::PSparseMatrix{<:MPIData}) parts=get_part_ids(mat.values)