From 5ce003c4e2870196340835e3a5b6b9ff4ad09d5f Mon Sep 17 00:00:00 2001
From: Cheng Gong
Date: Mon, 28 Oct 2024 21:17:00 -0400
Subject: [PATCH 1/3] replace `map` explicitly with a loop

---
 Project.toml        | 10 +++++++---
 src/core/control.jl |  5 ++++-
 2 files changed, 11 insertions(+), 4 deletions(-)

diff --git a/Project.toml b/Project.toml
index af7a5ba..f07a7a9 100644
--- a/Project.toml
+++ b/Project.toml
@@ -4,11 +4,15 @@ authors = ["Mathieu Morlighem ", "Gong Cheng

From: Cheng Gong
Date: Mon, 4 Nov 2024 09:47:11 -0500
Subject: [PATCH 2/3] remove unused return

---
 src/core/control.jl | 2 +-
 src/core/modules.jl | 3 +--
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/core/control.jl b/src/core/control.jl
index d1f2b8b..10d2fa5 100644
--- a/src/core/control.jl
+++ b/src/core/control.jl
@@ -61,7 +61,7 @@ end#}}}
 # cost function handler for autodiff
 function costfunction(α::Vector{Float64}, femmodel::FemModel) #{{{
-   # get the md.inversion.control_string
+   # get the md.inversion.independent_string
    control_string = FindParam(String, femmodel.parameters, InversionControlParametersEnum)
    # get the Enum
    controlvar_enum = StringToEnum(control_string)
diff --git a/src/core/modules.jl b/src/core/modules.jl
index f9bec2c..1f76f9a 100644
--- a/src/core/modules.jl
+++ b/src/core/modules.jl
@@ -277,8 +277,7 @@ function InputUpdateFromSolutionx(analysis::Analysis,ug::IssmVector,femmodel::Fe
       InputUpdateFromSolution(analysis,ug.vector,femmodel.elements[i])
    end
-   return ug
-
+   return Nothing
 end#}}}
 function InputUpdateFromVectorx(femmodel::FemModel, vector::Vector{Float64}, enum::IssmEnum, layout::IssmEnum)# {{{

From d016b8ea3aaa9f4dcba0fb0c2c9f1887977a90c9 Mon Sep 17 00:00:00 2001
From: Cheng Gong
Date: Wed, 6 Nov 2024 15:48:24 -0500
Subject: [PATCH 3/3] fix bug when upgrading to Enzyme 0.13, add runtime
 activity in autodiff

---
 src/core/control.jl      | 2 +-
 test/testoptimization.jl | 9 +++++----
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/src/core/control.jl b/src/core/control.jl
index 10d2fa5..4a6df06 100644
--- a/src/core/control.jl
+++ b/src/core/control.jl
@@ -29,7 +29,7 @@ function ComputeGradient(∂J_∂α::Vector{Float64}, α::Vector{Float64}, femmo
    # zero ALL depth of the model, make sure we get correct gradient
    dfemmodel = Enzyme.Compiler.make_zero(Base.Core.Typeof(femmodel), IdDict(), femmodel)
    # compute the gradient
-   autodiff(Enzyme.Reverse, costfunction, Active, Duplicated(α, ∂J_∂α), Duplicated(femmodel,dfemmodel))
+   autodiff(set_runtime_activity(Enzyme.Reverse), costfunction, Active, Duplicated(α, ∂J_∂α), Duplicated(femmodel,dfemmodel))
 end#}}}
 function CostFunctionx(femmodel::FemModel, α::Vector{Float64}, controlvar_enum::IssmEnum, SId_enum::IssmEnum, cost_enum_list::Vector{IssmEnum}, ::Val{solutionstring}) where solutionstring #{{{
    #Update FemModel accordingly
diff --git a/test/testoptimization.jl b/test/testoptimization.jl
index 25b35f9..55c9c6f 100755
--- a/test/testoptimization.jl
+++ b/test/testoptimization.jl
@@ -29,14 +29,15 @@ md.inversion.dependent_string = ["SurfaceAbsVelMisfit"]
 femmodel=DJUICE.ModelProcessor(md, :StressbalanceSolution)
 n = length(α)
+DJUICE.costfunction(α, femmodel)
 # test Enzyme autodiff only
 dfemmodel = Enzyme.Compiler.make_zero(Base.Core.Typeof(femmodel), IdDict(), femmodel)
-autodiff(Enzyme.Reverse, DJUICE.costfunction, Active, Duplicated(α, ∂J_∂α), Duplicated(femmodel,dfemmodel))
+autodiff(set_runtime_activity(Enzyme.Reverse), DJUICE.costfunction, Active, Duplicated(α, ∂J_∂α), Duplicated(femmodel,dfemmodel))
 # use user defined grad, errors!
-optprob = OptimizationFunction(DJUICE.costfunction, Optimization.AutoEnzyme())
+#optprob = OptimizationFunction(DJUICE.costfunction, Optimization.AutoEnzyme())
 #prob = Optimization.OptimizationProblem(optprob, α, femmodel, lb=md.inversion.min_parameters, ub=md.inversion.max_parameters)
-prob = Optimization.OptimizationProblem(optprob, α, femmodel)
-sol = Optimization.solve(prob, Optimization.LBFGS())
+#prob = Optimization.OptimizationProblem(optprob, α, femmodel)
+#sol = Optimization.solve(prob, Optimization.LBFGS())
 #sol = Optimization.solve(prob, Optim.GradientDescent())
 #sol = Optimization.solve(prob, Optim.NelderMead())
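
The key change in PATCH 3 is wrapping the reverse mode in set_runtime_activity before calling autodiff, so Enzyme 0.13 resolves data whose activity cannot be proven at compile time during execution rather than erroring. Below is a minimal, self-contained sketch of that call pattern under stated assumptions: SimpleModel and cost are hypothetical stand-ins for DJUICE's FemModel and costfunction, and the gradient check at the end is only illustrative.

using Enzyme

# Hypothetical stand-in for DJUICE's FemModel: a mutable struct holding data
# that must be shadowed during reverse-mode AD.
mutable struct SimpleModel
    obs::Vector{Float64}
end

# Hypothetical stand-in for DJUICE.costfunction: a scalar misfit J(α).
cost(α::Vector{Float64}, m::SimpleModel) = sum((α .- m.obs) .^ 2)

m     = SimpleModel([1.0, 2.0, 3.0])
α     = [0.5, 1.5, 2.5]
∂J_∂α = zero(α)

# Zeroed "shadow" copy of the model, following the call used in ComputeGradient.
dm = Enzyme.Compiler.make_zero(Base.Core.Typeof(m), IdDict(), m)

# Reverse-mode AD with runtime activity enabled (the Enzyme 0.13 pattern from
# PATCH 3); the gradient with respect to α is accumulated into ∂J_∂α.
autodiff(set_runtime_activity(Enzyme.Reverse), cost, Active,
         Duplicated(α, ∂J_∂α), Duplicated(m, dm))

@show ∂J_∂α   # expected 2 .* (α .- m.obs) == [-1.0, -1.0, -1.0]

Enabling runtime activity adds some overhead, but it accepts model structures like FemModel whose fields Enzyme cannot statically classify as active or constant, which is why the patch applies it both in ComputeGradient and in the standalone autodiff test.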