add ice volume calculation #15

Merged · 7 commits · Jun 6, 2024
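Summary of the change set, as visible in the diff below: a new `dataprocess.jl` is included from the user directory and its `IntegrateOverDomain` and `GetAreas` helpers are exported for the ice volume calculation; `Control_Core` gains a `solution::Symbol` argument so a pure gradient evaluation (`:grad`) can be requested without the (currently disabled) optimizer path; the cost-function machinery moves from `src/core/costfunctions.jl` into `src/core/control.jl`; and the now-unused Optimization and OptimizationOptimJL dependencies are dropped while several compat bounds are relaxed.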
14 changes: 5 additions & 9 deletions Project.toml
@@ -9,8 +9,6 @@ Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
 Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
 GLMakie = "e9467ef8-e4e7-5192-8a1a-b1aee30e663a"
 MAT = "23992714-dd62-5051-b70f-ba57cb901cac"
-Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
-OptimizationOptimJL = "36348300-93cb-4f02-beb5-3c3902f8871e"
 Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
 SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
 StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"
@@ -19,14 +17,12 @@ Triangulate = "f7e6ffb2-c36d-4f8f-a77e-16e897189344"
 [compat]
 ColorSchemes = "3.25"
 Enzyme = "0.12"
-Flux = "0.14.15"
-GLMakie = "0.9.11"
-MAT = "0.10.7"
-Optimization = "3.25.1"
-OptimizationOptimJL = "0.3.1"
+Flux = "0.14"
+GLMakie = "0.10"
+MAT = "0.10"
 SparseArrays = "1.8.0"
-StatsBase = "0.34.3"
-Triangulate = "2.3.2"
+StatsBase = "0.34"
+Triangulate = "2.3"
 julia = "1.8"
 
 [extras]
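Note that removing Optimization and OptimizationOptimJL from `[deps]` mirrors the change in `src/core/control.jl` below, where the corresponding `using` line is commented out. The compat entries are also loosened from patch-level pins to minor-series bounds: under Pkg's semver-style matching, `Flux = "0.14"` admits any 0.14.x release, whereas the old `"0.14.15"` meant ">= 0.14.15, < 0.15.0".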
2 changes: 2 additions & 0 deletions src/DJUICE.jl
@@ -23,5 +23,7 @@ include("$coredir/solve.jl")
 export solve
 include("$userdir/plotmodel.jl")
 export plotmodel
+include("$userdir/dataprocess.jl")
+export IntegrateOverDomain, GetAreas
 
 end
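The newly exported `IntegrateOverDomain` and `GetAreas` live in `dataprocess.jl`, which this diff does not display, so their exact signatures are unknown here. As an illustration of the underlying idea only — ice volume as the integral of thickness over a P1 triangular mesh, with per-element areas from the shoelace formula — here is a self-contained sketch; all names are hypothetical, not the DJUICE implementation:

```julia
# Hypothetical sketch of an ice-volume integral on a P1 triangular mesh.
# elements: nel×3 connectivity; x, y: node coordinates; H: nodal ice thickness.
function get_areas(elements::Matrix{Int}, x::Vector{Float64}, y::Vector{Float64})
	areas = zeros(size(elements, 1))
	for i in axes(elements, 1)
		n1, n2, n3 = elements[i, 1], elements[i, 2], elements[i, 3]
		# shoelace formula for the area of a triangle
		areas[i] = 0.5 * abs((x[n2] - x[n1]) * (y[n3] - y[n1]) - (x[n3] - x[n1]) * (y[n2] - y[n1]))
	end
	return areas
end

function integrate_over_domain(elements::Matrix{Int}, areas::Vector{Float64}, H::Vector{Float64})
	# for linear (P1) elements, ∫H dA over one triangle is the nodal mean times the area
	V = 0.0
	for i in axes(elements, 1)
		V += areas[i] * (H[elements[i, 1]] + H[elements[i, 2]] + H[elements[i, 3]]) / 3
	end
	return V
end
```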
89 changes: 60 additions & 29 deletions src/core/control.jl
@@ -1,43 +1,74 @@
 using Enzyme
 Enzyme.API.looseTypeAnalysis!(false)
 Enzyme.API.strictAliasing!(false)
 Enzyme.API.typeWarning!(false)
 Enzyme.Compiler.RunAttributor[] = false
 
-using Optimization, OptimizationOptimJL
+#using Optimization, OptimizationOptimJL
 
 
-function Control_Core(md::model, femmodel::FemModel) #{{{
-	# solve for optimization
-	# TODO: just a first try, need to add all the features
-	α = md.inversion.independent
-	∂J_∂α = zero(α)
-	n = length(α)
-	# use user defined grad, errors!
-	#optprob = OptimizationFunction(costfunction, Optimization.AutoEnzyme(), grad=computeGradient(∂J_∂α, α, femmodel))
-	optprob = OptimizationFunction(costfunction, Optimization.AutoEnzyme())
-	prob = Optimization.OptimizationProblem(optprob, α, femmodel, lb=md.inversion.min_parameters, ub=md.inversion.max_parameters)
-	sol = Optimization.solve(prob, Optim.LBFGS())
-
-	independent_enum = StringToEnum(md.inversion.independent_string)
-	InputUpdateFromVectorx(femmodel, sol.u, independent_enum, VertexSIdEnum)
-	RequestedOutputsx(femmodel, [independent_enum])
-end#}}}
-function computeGradient(md::model, femmodel::FemModel) #{{{
+function Control_Core(md::model, femmodel::FemModel, solution::Symbol) #{{{
 	#independent variable
 	α = md.inversion.independent
 	#initialize derivative as 0
 	∂J_∂α = zero(α)
-	# Compute Gradient
-	computeGradient(∂J_∂α, α, femmodel)
-
-	#Put gradient in results
-	InputUpdateFromVectorx(femmodel, ∂J_∂α, GradientEnum, VertexSIdEnum)
-	RequestedOutputsx(femmodel, [GradientEnum])
+	if solution ===:grad
+		# only compute the gradient
+		ComputeGradient(∂J_∂α, α, femmodel)
+		#Put gradient in results
+		InputUpdateFromVectorx(femmodel, ∂J_∂α, GradientEnum, VertexSIdEnum)
+		RequestedOutputsx(femmodel, [GradientEnum])
+	else
+		# optimization
+		# use user defined grad, errors!
+		#optprob = OptimizationFunction(costfunction, Optimization.AutoEnzyme())
+		#prob = Optimization.OptimizationProblem(optprob, α, femmodel, lb=md.inversion.min_parameters, ub=md.inversion.max_parameters)
+		#sol = Optimization.solve(prob, Optim.LBFGS())
+		independent_enum = StringToEnum(md.inversion.independent_string)
+		InputUpdateFromVectorx(femmodel, sol.u, independent_enum, VertexSIdEnum)
+		RequestedOutputsx(femmodel, [independent_enum])
+	end
 end#}}}
-function computeGradient(∂J_∂α::Vector{Float64}, α::Vector{Float64}, femmodel::FemModel) #{{{
+
+function ComputeGradient(∂J_∂α::Vector{Float64}, α::Vector{Float64}, femmodel::FemModel) #{{{
 	# zero ALL depth of the model, make sure we get correct gradient
 	dfemmodel = Enzyme.Compiler.make_zero(Base.Core.Typeof(femmodel), IdDict(), femmodel)
 	# compute the gradient
-	autodiff(Enzyme.Reverse, costfunction, Duplicated(α, ∂J_∂α), Duplicated(femmodel,dfemmodel))
+	autodiff(Enzyme.Reverse, costfunction, Active, Duplicated(α, ∂J_∂α), Duplicated(femmodel,dfemmodel))
 end#}}}
+function CostFunctionx(femmodel::FemModel, α::Vector{Float64}, controlvar_enum::IssmEnum, SId_enum::IssmEnum, cost_enum_list::Vector{IssmEnum}) #{{{
+	#Update FemModel accordingly
+	InputUpdateFromVectorx(femmodel, α, controlvar_enum, SId_enum)
+
+	#solve PDE
+	analysis = StressbalanceAnalysis()
+	Core(analysis, femmodel)
+
+	#update all values of the cost functions
+	RequestedOutputsx(femmodel, cost_enum_list)
+
+	#Compute cost function
+	J = 0.0
+	for i in 1:length(cost_enum_list)
+		J += femmodel.results[end-i+1].value
+	end
+
+	#return cost function
+	return J
+end#}}}
+
+# cost function handler for autodiff
+function costfunction(α::Vector{Float64}, femmodel::FemModel) #{{{
+	# get the md.inversion.control_string
+	control_string = FindParam(String, femmodel.parameters, InversionControlParametersEnum)
+	# get the Enum
+	controlvar_enum = StringToEnum(control_string)
+	if isnothing(controlvar_enum)
+		error(control_string, " is not defined in DJUICE, therefore the derivative with respect to ", control_string, " is meaningless")
+	end
+
+	# get the cost function list from md.inversion.dependent_string
+	cost_list = FindParam(Vector{String}, femmodel.parameters, InversionCostFunctionsEnum)
+	cost_enum_list = map(StringToEnum, cost_list)
+
+	# compute cost function
+	# TODO: loop through all controls with respect to all the components in the cost function
+	CostFunctionx(femmodel, α, controlvar_enum, VertexSIdEnum, cost_enum_list)
+end#}}}
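The `ComputeGradient` above follows Enzyme.jl's reverse-mode idiom: allocate a zeroed shadow for each differentiable argument, wrap value and shadow in `Duplicated`, declare the scalar return `Active`, and read the gradient out of the shadow afterwards (`make_zero` builds the zeroed shadow of the whole `FemModel` so constant model data does not pollute the gradient). A minimal self-contained version of the same pattern on a toy cost function, for orientation only:

```julia
using Enzyme

# Toy cost function: J(α) = Σ αᵢ², whose gradient is 2α.
square_sum(α::Vector{Float64}) = sum(abs2, α)

α     = [1.0, 2.0, 3.0]
∂J_∂α = zero(α)   # shadow buffer; Enzyme accumulates ∂J/∂α into it

# Active marks the scalar return as differentiable; Duplicated pairs the
# input with its shadow, exactly as in ComputeGradient above.
autodiff(Reverse, square_sum, Active, Duplicated(α, ∂J_∂α))

@assert ∂J_∂α ≈ 2 .* α   # [2.0, 4.0, 6.0]
```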
36 changes: 6 additions & 30 deletions src/core/costfunctions.jl
@@ -1,33 +1,4 @@
 
-function costfunction(α::Vector{Float64}, femmodel::FemModel) #{{{
-	# get the md.inversion.control_string
-	control_string = FindParam(String, femmodel.parameters, InversionControlParametersEnum)
-	# get the Enum
-	controlvar_enum = StringToEnum(control_string)
-	if isnothing(controlvar_enum)
-		error(control_string, " is not defined in DJUICE, therefore the derivative with respect to ", control_string, " is meaningless")
-	end
-	# compute cost function
-	# TODO: loop through all controls with respect to all the components in the cost function
-	costfunction(α, femmodel, controlvar_enum, VertexSIdEnum)
-end#}}}
-function costfunction(α::Vector{Float64}, femmodel::FemModel, controlvar_enum::IssmEnum, SId_enum::IssmEnum) #{{{
-	#Update FemModel accordingly
-	InputUpdateFromVectorx(femmodel, α, controlvar_enum, SId_enum)
-
-	#solve PDE
-	analysis = StressbalanceAnalysis()
-	Core(analysis, femmodel)
-
-	#Compute cost function
-	J = SurfaceAbsVelMisfitx(femmodel)
-
-	#J += ControlVariableAbsGradientx(femmodel, α, controlvar_enum)
-
-	#return cost function
-	return J
-end#}}}
-
 # The misfit functions
 function SurfaceAbsVelMisfitx(femmodel::FemModel) #{{{
 
@@ -67,7 +38,12 @@ function SurfaceAbsVelMisfitx(femmodel::FemModel) #{{{
 
 	return J
 end#}}}
-function ControlVariableAbsGradientx(femmodel::FemModel, α::Vector{Float64}, controlvar_enum::IssmEnum) #{{{
+function ControlVariableAbsGradientx(femmodel::FemModel) #{{{
+
+	# get the md.inversion.control_string
+	control_string = FindParam(String, femmodel.parameters, InversionControlParametersEnum)
+	# get the Enum
+	controlvar_enum = StringToEnum(control_string)
 
 	#Initialize output
 	J = 0.0
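The body of `SurfaceAbsVelMisfitx` is collapsed in this view; a surface velocity misfit of this kind is conventionally J = ½ Σᵢ wᵢ ((uᵢ − uᵢᵒᵇˢ)² + (vᵢ − vᵢᵒᵇˢ)²). A nodal sketch for orientation only — the argument names and the quadrature weights are hypothetical, and the actual implementation integrates element by element:

```julia
# Hypothetical nodal form of a surface velocity misfit; all argument names
# are illustrative, not the DJUICE API.
function surface_vel_misfit(vx, vy, vx_obs, vy_obs, w)
	J = 0.0
	for i in eachindex(vx)
		J += 0.5 * w[i] * ((vx[i] - vx_obs[i])^2 + (vy[i] - vy_obs[i])^2)
	end
	return J
end
```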