Merge pull request idaholab#28923 from lynnmunday/fixTaoParamsVec
Fix use of TAO solution vector
lindsayad authored Oct 25, 2024
2 parents 0d384a3 + e62b0fc commit e96644b
Showing 16 changed files with 20 additions and 202 deletions.
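
The core of the change is in OptimizeSolve.C below: instead of copying TAO's solution vector into `_parameters`, each callback now swaps TAO's vector in for the duration of the evaluation and swaps it back out before returning, so the forward/adjoint solves see TAO's current iterate and TAO keeps ownership of its own solution vector. A minimal sketch of that swap/restore bracket, assuming a libMesh/PETSc build; `SolverCtx` and its `evaluate` member are hypothetical stand-ins for the real `OptimizeSolve` context and objective evaluation, not the shipped implementation:

// Sketch only: the swap/restore bracket around an objective evaluation.
// SolverCtx and evaluate are illustrative stand-ins for OptimizeSolve.
#include <memory>
#include <petsctao.h>
#include "libmesh/petsc_vector.h"

struct SolverCtx
{
  const libMesh::Parallel::Communicator & comm;
  std::unique_ptr<libMesh::PetscVector<libMesh::Number>> parameters;
  double (*evaluate)(SolverCtx &); // runs the forward solve on `parameters`
};

PetscErrorCode
objectiveWrapper(Tao /*tao*/, Vec x, PetscReal * objective, void * ctx)
{
  PetscFunctionBegin;
  auto * solver = static_cast<SolverCtx *>(ctx);

  // Wrap TAO's solution vector without copying it ...
  libMesh::PetscVector<libMesh::Number> param(x, solver->comm);
  // ... swap it in so the evaluation reads TAO's current iterate ...
  solver->parameters->swap(param);

  *objective = solver->evaluate(*solver);

  // ... and swap it back so TAO still owns its vector after the callback.
  solver->parameters->swap(param);
  PetscFunctionReturn(PETSC_SUCCESS);
}

The objective-and-gradient wrapper in the diff uses the same bracket around both evaluations, and the Hessian wrapper no longer touches the incoming TAO vector at all.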
Binary file not shown.
@@ -6,7 +6,7 @@
objective_name = objective_value
parameter_names = 'D'
num_values = '4'
initial_condition = '0.01 0.01 0.01 0.01'
initial_condition = '0.2 0.2 0.2 0.2'
[]

[Reporters]
@@ -51,7 +51,7 @@
[Executioner]
type = Optimize
tao_solver = taonm
petsc_options_iname = '-tao_gatol'
petsc_options_value = '1e-6'
petsc_options_iname = '-tao_gatol -tao_nm_lambda'
petsc_options_value = '1e-8 0.25'
verbose = true
[]
1 change: 1 addition & 0 deletions modules/optimization/examples/materialTransient/tests
@@ -18,6 +18,7 @@
type = Exodiff
input = optimize_nograd.i
exodiff = optimize_nograd_out_forward0.e
rel_err = 1e-2
prereq = material_transient/forward
method = '!dbg'
valgrind = none
102 changes: 0 additions & 102 deletions modules/optimization/examples/simpleTransient/gold/gradFree.csv

This file was deleted.

48 changes: 0 additions & 48 deletions modules/optimization/examples/simpleTransient/main.i

This file was deleted.

11 changes: 0 additions & 11 deletions modules/optimization/examples/simpleTransient/tests
@@ -3,17 +3,6 @@
design = "ElementOptimizationSourceFunctionInnerProduct.md NearestReporterCoordinatesFunction.md ReporterTimePointSource.md"
issues = '#21885'
requirement = "The system shall be able to perform force-inversion optimization with time-dependent parameters and outputs with"
[objective]
type = CSVDiff
input = main.i
cli_args = "-tao_gatol 90 forward:Outputs/file_base=gradFree
OptimizationReporter/initial_condition='0.017248124297389537 -0.4383269038787613 -1.0120688493193575 -0.604005388481836 -1.4561640635027022 3.231744273062789 0.30497109596745786 0.44633697183648136 0.01016924485170484 2.102321678477381 -0.6633902660583816 0.8744951418636224 -1.6563188219465808 -0.7960335532123246 -0.8868197436891168 3.4789948727882036 1.221786635733348 -1.3649297188353513 0.6718141978617336 1.073784058368632 -0.20990475499085504 -0.23140796936323005 2.719922596118634 0.49032306047413676 0.5754881340320752 -1.4387408219638242 0.5868285452264156 -0.26563061173982583 0.7632228812045031 0.5847693390767825 1.2297597229219401 0.018872725738175458 1.3701093737026238 0.5124595468128872 0.388110595828123 0.11247112873355217 0.9234434408370418 0.3748330890983532 -0.1772288909293001 -0.9744206675334496 -0.3917172574619362 -0.2538603477404897 0.5517774216239373 0.07213835476751881'"
csvdiff = gradFree.csv
rel_err = 1e-4
heavy = true
method = OPT
detail = 'only a forward solve,'
[]
[gradient]
type = CSVDiff
input = main_gradient.i
2 changes: 1 addition & 1 deletion modules/optimization/include/executioners/OptimizeSolve.h
@@ -173,7 +173,7 @@ class OptimizeSolve : public SolveObject
/// Number of parameters being optimized
dof_id_type _ndof;

/// Parameters (solution)
/// Parameters (solution) given to TAO
std::unique_ptr<libMesh::PetscVector<Number>> _parameters;

/// Hessian (matrix) - usually a matrix-free representation
20 changes: 10 additions & 10 deletions modules/optimization/src/executioners/OptimizeSolve.C
@@ -70,7 +70,7 @@ OptimizeSolve::solve()
_obj_function = &_problem.getUserObject<OptimizationReporterBase>("OptimizationReporter");

// Initialize solution and matrix
_obj_function->setInitialCondition(*_parameters.get());
_obj_function->setInitialCondition(*_parameters);
_ndof = _parameters->size();

// time step defaults 1, we want to start at 0 for first iteration to be
Expand Down Expand Up @@ -357,9 +357,10 @@ OptimizeSolve::objectiveFunctionWrapper(Tao /*tao*/, Vec x, Real * objective, vo
auto * solver = static_cast<OptimizeSolve *>(ctx);

libMesh::PetscVector<Number> param(x, solver->_my_comm);
*solver->_parameters = param;
solver->_parameters->swap(param);

(*objective) = solver->objectiveFunction();
solver->_parameters->swap(param);
PetscFunctionReturn(PETSC_SUCCESS);
}

@@ -371,26 +372,25 @@ OptimizeSolve::objectiveAndGradientFunctionWrapper(
auto * solver = static_cast<OptimizeSolve *>(ctx);

libMesh::PetscVector<Number> param(x, solver->_my_comm);
*solver->_parameters = param;
solver->_parameters->swap(param);

(*objective) = solver->objectiveFunction();

libMesh::PetscVector<Number> grad(gradient, solver->_my_comm);

solver->gradientFunction(grad);
solver->_parameters->swap(param);
PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode
OptimizeSolve::hessianFunctionWrapper(Tao /*tao*/, Vec x, Mat /*hessian*/, Mat /*pc*/, void * ctx)
OptimizeSolve::hessianFunctionWrapper(
Tao /*tao*/, Vec /*x*/, Mat /*hessian*/, Mat /*pc*/, void * ctx)
{
PetscFunctionBegin;
// Define Hessian-vector multiplication routine
auto * solver = static_cast<OptimizeSolve *>(ctx);
libMesh::PetscVector<Number> param(x, solver->_my_comm);
*solver->_parameters = param;
PetscErrorCode ierr = MatShellSetOperation(
solver->_hessian, MATOP_MULT, (void (*)(void))OptimizeSolve::applyHessianWrapper);

CHKERRQ(ierr);
PetscFunctionReturn(PETSC_SUCCESS);
}
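
The reworked `hessianFunctionWrapper` above only registers the matrix-free mult routine on the shell Hessian and leaves the incoming TAO vector unused. A minimal PETSc sketch of that shell-matrix wiring, with `myHessianMult` and `buildShellHessian` as hypothetical stand-ins (the real action routine in the diff is `OptimizeSolve::applyHessianWrapper`):

// Sketch only: registering a user mult routine on a matrix-free (shell) Hessian.
// myHessianMult is a hypothetical stand-in for the real Hessian-vector product.
#include <petscmat.h>

static PetscErrorCode
myHessianMult(Mat H, Vec s, Vec Hs)
{
  PetscFunctionBegin;
  void * ctx;
  PetscCall(MatShellGetContext(H, &ctx));
  // Apply the Hessian action using ctx; the placeholder below just copies s.
  PetscCall(VecCopy(s, Hs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode
buildShellHessian(MPI_Comm comm, PetscInt local_n, void * user_ctx, Mat * H)
{
  PetscFunctionBegin;
  PetscCall(MatCreateShell(comm, local_n, local_n, PETSC_DETERMINE, PETSC_DETERMINE, user_ctx, H));
  PetscCall(MatShellSetOperation(*H, MATOP_MULT, (void (*)(void))myHessianMult));
  PetscFunctionReturn(PETSC_SUCCESS);
}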
@@ -424,7 +424,7 @@ Real
OptimizeSolve::objectiveFunction()
{
TIME_SECTION("objectiveFunction", 2, "Objective forward solve");
_obj_function->updateParameters(*_parameters.get());
_obj_function->updateParameters(*_parameters);

Moose::PetscSupport::petscSetOptions(_petsc_options, _solver_params);
_problem.execute(OptimizationAppTypes::EXEC_FORWARD);
@@ -447,7 +447,7 @@ void
OptimizeSolve::gradientFunction(libMesh::PetscVector<Number> & gradient)
{
TIME_SECTION("gradientFunction", 2, "Gradient adjoint solve");
_obj_function->updateParameters(*_parameters.get());
_obj_function->updateParameters(*_parameters);

Moose::PetscSupport::petscSetOptions(_petsc_options, _solver_params);
_problem.execute(OptimizationAppTypes::EXEC_ADJOINT);
18 changes: 0 additions & 18 deletions modules/optimization/test/tests/executioners/basic_optimize/tests
@@ -68,23 +68,5 @@
max_threads = 1 # Optimize executioner does not support multiple threads
detail = 'testing hand-coded gradient;'
[]
[failed_ls]
type = RunApp
input = quadratic_minimize.i
cli_args = '-tao_fd_gradient true'
allow_test_objects = True
expect_out = 'Solver terminated: -6 Line Search Failure'
max_threads = 1 # Optimize executioner does not support multiple threads
detail = 'indicating a failed line search;'
[]
[failed_it]
type = RunApp
input = quadratic_minimize.i
cli_args = '-tao_fd_gradient true -tao_ls_type unit -tao_max_it 1'
allow_test_objects = True
expect_out = 'Solver terminated: -2 Maximum Iterations'
max_threads = 1 # Optimize executioner does not support multiple threads
detail = 'indicating maximum iteration reached;'
[]
[]
[]
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -35,7 +35,7 @@
type = Optimize
tao_solver = taoblmvm
petsc_options_iname = '-tao_gatol'
petsc_options_value = '0.0001'
petsc_options_value = '1e-4'
verbose = true
[]

@@ -48,7 +48,7 @@
[]
[adjoint]
type = FullSolveMultiApp
input_files = adjoint.i
input_files = adjoint_iteration_output.i
execute_on = "ADJOINT"
clone_parent_mesh = true
[]
@@ -30,7 +30,7 @@
[MultiApps]
[forward]
type = FullSolveMultiApp
input_files = forward_and_adjoint.i
input_files = forward_and_adjoint_iteration_output.i
execute_on = "FORWARD"
[]
[]
@@ -11,24 +11,20 @@
abs_zero = 1.0e-5
# steady solve
recover = false
issues = '#25009'
design = 'ExodusOptimizationSteady.md'
cli_args = "MultiApps/adjoint/input_files='adjoint_iteration_output.i'"
requirement = 'The system shall be able to perform gradient based material parameter inversion '
'for a single material property and output the iteration-wise output for the '
'for a single material property and output the iteration-wise exodus output for the '
'adjoint problem.'
[]
[auto_adjoint_iteration_output]
type = Exodiff
input = main_auto_adjoint.i
cli_args = 'MultiApps/forward/input_files=forward_and_adjoint_iteration_output.i'
exodiff = 'main_auto_adjoint_out_forward0_exodus.e'
# Optimize executioner does not support multiple threads
max_threads = 1
# steady solve
recover = false
requirement = 'The system shall be able to invert for point loads using gradient-based '
'optimization with an automatically computed adjoint and output the exodus output '
'per iteration.'
'per iteration for the combined forward and adjoint problem variables.'
[]
[]
