//* This file is part of the MOOSE framework
//* https://mooseframework.inl.gov
//*
//* All rights reserved, see COPYRIGHT for full restrictions
//* https://github.com/idaholab/moose/blob/master/COPYRIGHT
//*
//* Licensed under LGPL 2.1, please see LICENSE for details
//* https://www.gnu.org/licenses/lgpl-2.1.html

#include "AdjointSolve.h"
#include "OptimizationAppTypes.h"

#include "FEProblem.h"
#include "NonlinearSystemBase.h"
#include "NonlinearSystem.h"
#include "NodalBCBase.h"
#include "Executioner.h"

#include "libmesh/fuzzy_equals.h"
#include "libmesh/petsc_matrix.h"
#include "libmesh/petsc_vector.h"
#include "petscmat.h"

using namespace libMesh;
InputParameters
AdjointSolve::validParams()
{
  InputParameters params = emptyInputParameters();
  params.addRequiredParam<std::vector<SolverSystemName>>(
      "forward_system",
      "Name of the nonlinear system representing the forward problem. Multiple and linear solver "
      "systems are not currently supported.");
  params.addRequiredParam<NonlinearSystemName>(
      "adjoint_system", "Name of the system representing the adjoint problem.");
  return params;
}
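
// A minimal sketch of how these parameters might be supplied from an input file.
// The executioner type and the system names 'forward'/'adjoint' below are
// illustrative assumptions, not taken from this source:
//
//   [Executioner]
//     type = SteadyAndAdjoint
//     forward_system = forward
//     adjoint_system = adjoint
//   []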

AdjointSolve::AdjointSolve(Executioner & ex)
  : SolveObject(ex),
    _forward_sys_num(
        _problem.nlSysNum(getParam<std::vector<SolverSystemName>>("forward_system")[0])),
    _adjoint_sys_num(_problem.nlSysNum(getParam<NonlinearSystemName>("adjoint_system"))),
    _nl_forward(_problem.getNonlinearSystemBase(_forward_sys_num)),
    _nl_adjoint(_problem.getNonlinearSystemBase(_adjoint_sys_num))
{
  // Disallow vectors of systems
  if (getParam<std::vector<SolverSystemName>>("forward_system").size() != 1)
    paramError("forward_system",
               "Multiple nonlinear forward systems are not supported at the moment");

  // These should never be hit, but just in case
  if (!dynamic_cast<NonlinearSystem *>(&_nl_forward))
    paramError("forward_system", "Forward system does not appear to be a 'NonlinearSystem'.");
  if (!dynamic_cast<NonlinearSystem *>(&_nl_adjoint))
    paramError("adjoint_system", "Adjoint system does not appear to be a 'NonlinearSystem'.");

  // The adjoint system should never perform its own automatic scaling; scaling factors from the
  // forward system are applied instead.
  _nl_adjoint.automaticScaling(false);

  // We need to force the forward system to have a scaling vector. This is in case a user
  // provides scaling for individual variables but doesn't have any AD objects.
  _nl_forward.addScalingVector();

  // Set the solver options for the adjoint system
  mooseAssert(_problem.numSolverSystems() > 1,
              "We should have forward and adjoint systems as evidenced by our initialization list");
  const auto prefix = _nl_adjoint.prefix();
  Moose::PetscSupport::storePetscOptions(_problem, prefix, ex);
  Moose::PetscSupport::setConvergedReasonFlags(_problem, prefix);
  // Set the solver parameter prefix
  auto & solver_params = _problem.solverParams(_nl_adjoint.number());
  solver_params._prefix = prefix;
  solver_params._solver_sys_num = _nl_adjoint.number();
}

bool
AdjointSolve::solve()
{
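  // A sketch of the intent (a hedged reading of the steps below, not normative documentation):
  // if the forward problem is R(u) = 0 with Jacobian J = dR/du, this routine assembles J and an
  // adjoint source b from the adjoint system's (non-time) residual objects, then solves the
  // transposed system J^T lambda = b for the adjoint solution lambda.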
  TIME_SECTION("execute", 1, "Executing adjoint problem", false);

  mooseAssert(!_inner_solve,
              "I don't see any code path in which this class winds up with an inner solve");

  // Ensure the PETSc options set in the user file are also passed to the adjoint solve
  auto & petsc_options = _problem.getPetscOptions();
  auto & pars = _problem.solverParams(_nl_adjoint.number());
  Moose::PetscSupport::petscSetOptions(petsc_options, pars);

  _problem.execute(OptimizationAppTypes::EXEC_ADJOINT_TIMESTEP_BEGIN);

  if (!_problem.execMultiApps(OptimizationAppTypes::EXEC_ADJOINT_TIMESTEP_BEGIN))
  {
    _console << "MultiApps failed to converge on ADJOINT_TIMESTEP_BEGIN!" << std::endl;
    return false;
  }
  // Output results between the forward and adjoint solve.
  _problem.outputStep(OptimizationAppTypes::EXEC_ADJOINT_TIMESTEP_BEGIN);

  checkIntegrity();

  // Convenient references
  // Adjoint matrix, solution, and right-hand side
  auto & matrix = static_cast<ImplicitSystem &>(_nl_forward.system()).get_system_matrix();
  auto & solution = _nl_adjoint.solution();
  auto & rhs = _nl_adjoint.getResidualNonTimeVector();
  // Linear solver parameters
  auto & es = _problem.es();
  const auto tol = es.parameters.get<Real>("linear solver tolerance");
  const auto maxits = es.parameters.get<unsigned int>("linear solver maximum iterations");
  // Linear solver for the adjoint system
  auto & solver = *static_cast<ImplicitSystem &>(_nl_adjoint.system()).get_linear_solver();

  // Assemble the adjoint system by evaluating the forward Jacobian, computing the adjoint
  // residual/source, and homogenizing nodal BCs
  assembleAdjointSystem(matrix, solution, rhs);
  applyNodalBCs(matrix, solution, rhs);

  // Solve the adjoint system
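  // Note: this relies on adjoint_solve solving with the transpose of `matrix`, i.e.
  // J^T lambda = rhs (under PETSc this is typically a KSPSolveTranspose), which is why the
  // forward Jacobian is assembled untransposed above.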
  solver.adjoint_solve(matrix, solution, rhs, tol, maxits);

  // To account for scaling of the forward problem, we need to apply the correction factor
  solution *= _nl_forward.getVector("scaling_factors");

  _nl_adjoint.update();
  if (solver.get_converged_reason() < 0)
  {
    _console << "Adjoint solve failed to converge with reason: "
             << Utility::enum_to_string(solver.get_converged_reason()) << std::endl;
    return false;
  }

  _problem.execute(OptimizationAppTypes::EXEC_ADJOINT_TIMESTEP_END);
  if (!_problem.execMultiApps(OptimizationAppTypes::EXEC_ADJOINT_TIMESTEP_END))
  {
    _console << "MultiApps failed to converge on ADJOINT_TIMESTEP_END!" << std::endl;
    return false;
  }

  return true;
}

void
AdjointSolve::assembleAdjointSystem(SparseMatrix<Number> & matrix,
                                    const NumericVector<Number> & /*solution*/,
                                    NumericVector<Number> & rhs)
{
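  // A sketch of what gets assembled here (a hedged reading of the calls below): `matrix` becomes
  // the forward Jacobian J = dR/du evaluated at the converged forward solution, and `rhs` becomes
  // the negated non-time residual of the adjoint system, i.e. the adjoint source term.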
  _problem.computeJacobian(*_nl_forward.currentSolution(), matrix, _forward_sys_num);

  _problem.setCurrentNonlinearSystem(_adjoint_sys_num);
  _problem.computeResidualTag(*_nl_adjoint.currentSolution(), rhs, _nl_adjoint.nonTimeVectorTag());

  rhs.scale(-1.0);
}

void
AdjointSolve::applyNodalBCs(SparseMatrix<Number> & matrix,
                            const NumericVector<Number> & solution,
                            NumericVector<Number> & rhs)
{
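  // Rough outline (a sketch, not normative documentation): gather the dofs constrained by the
  // forward system's nodal (Dirichlet) BCs, then zero the corresponding rows and columns of the
  // adjoint matrix so the adjoint solution satisfies the homogenized counterparts of those BCs.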
  std::vector<dof_id_type> nbc_dofs;
  auto & nbc_warehouse = _nl_forward.getNodalBCWarehouse();
  if (nbc_warehouse.hasActiveObjects())
  {
    for (const auto & bnode : *_mesh.getBoundaryNodeRange())
    {
      BoundaryID boundary_id = bnode->_bnd_id;
      Node * node = bnode->_node;

      if (!nbc_warehouse.hasActiveBoundaryObjects(boundary_id) ||
          node->processor_id() != processor_id())
        continue;

      for (const auto & bc : nbc_warehouse.getActiveBoundaryObjects(boundary_id))
        if (bc->shouldApply())
          for (unsigned int c = 0; c < bc->variable().count(); ++c)
            nbc_dofs.push_back(node->dof_number(_forward_sys_num, bc->variable().number() + c, 0));
    }

    // PETSc has a nice interface for zeroing rows and columns, so we'll use it
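    // (For reference, and assuming standard PETSc semantics: MatZeroRowsColumns zeroes the
    // listed rows and columns, places the given value (1.0 here) on the diagonal, and adjusts
    // the rhs using the supplied solution vector, so each eliminated dof ends up with a trivial
    // 1 * x_i = b_i equation.)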
    auto petsc_matrix = dynamic_cast<PetscMatrix<Number> *>(&matrix);
    auto petsc_solution = dynamic_cast<const PetscVector<Number> *>(&solution);
    auto petsc_rhs = dynamic_cast<PetscVector<Number> *>(&rhs);
    if (petsc_matrix && petsc_solution && petsc_rhs)
      LibmeshPetscCall(MatZeroRowsColumns(petsc_matrix->mat(),
                                          cast_int<PetscInt>(nbc_dofs.size()),
                                          numeric_petsc_cast(nbc_dofs.data()),
                                          1.0,
                                          petsc_solution->vec(),
                                          petsc_rhs->vec()));
    else
      mooseError("Using PETSc matrices and vectors is required for applying homogenized boundary "
                 "conditions.");
  }
}

void
AdjointSolve::checkIntegrity()
{
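  // Sanity checks, in brief: the adjoint variables must be unscaled (the forward system's
  // scaling is applied instead), and the two systems must be structurally compatible so the
  // forward Jacobian can stand in for the adjoint matrix.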
  const auto adj_vars = _nl_adjoint.getVariables(0);
  for (const auto & adj_var : adj_vars)
    // If the user supplies any scaling factors for individual variables, the
    // adjoint system won't be consistent.
    if (!absolute_fuzzy_equals(adj_var->scalingFactor(), 1.0))
      mooseError(
          "User cannot supply scaling factors for adjoint variables. The adjoint system is "
          "scaled automatically by the forward system.");

  // This is to prevent automatic scaling of the adjoint system; scaling is
  // taken from the forward system
  if (_nl_adjoint.hasVector("scaling_factors"))
    _nl_adjoint.removeVector("scaling_factors");

  // The main requirement is that the number of dofs in each system is the same
  if (_nl_forward.system().n_dofs() != _nl_adjoint.system().n_dofs())
    mooseError(
        "The forward and adjoint systems do not seem to be the same size. This could be because "
        "(1) the number of variables added to each system is not the same, (2) the variables do "
        "not have consistent family/order, or (3) the variables do not have the same block "
        "restriction.");
}
