ExternalPETScProblem.C
//* This file is part of the MOOSE framework
//* https://www.mooseframework.org
//*
//* All rights reserved, see COPYRIGHT for full restrictions
//* https://github.com/idaholab/moose/blob/master/COPYRIGHT
//*
//* Licensed under LGPL 2.1, please see LICENSE for details
//* https://www.gnu.org/licenses/lgpl-2.1.html

#include "ExternalPETScProblem.h"
#include "SystemBase.h"
#include "libmesh/petsc_vector.h"

registerMooseObject("ExternalPetscSolverApp", ExternalPETScProblem);

InputParameters
ExternalPETScProblem::validParams()
{
  InputParameters params = ExternalProblem::validParams();
  params.addRequiredParam<VariableName>("sync_variable",
                                        "The variable PETSc external solution will be synced to");
  return params;
}

ExternalPETScProblem::ExternalPETScProblem(const InputParameters & params)
  : ExternalProblem(params),
    _sync_to_var_name(getParam<VariableName>("sync_variable")),
    // Require ExternalPetscSolverApp
    _external_petsc_app(static_cast<ExternalPetscSolverApp &>(_app)),
    _ts(_external_petsc_app.getPetscTS()),
    // RestartableData is required for recovering when the PETSc solver runs as a master app
    _petsc_sol(declareRestartableData<Vec>("petsc_sol")),
    _petsc_sol_old(declareRestartableData<Vec>("petsc_sol_old")),
    _petsc_udot(declareRestartableData<Vec>("petsc_udot"))
{
  DM da;
  TSGetDM(_ts, &da);
  // Create a global solution vector
  DMCreateGlobalVector(da, &_petsc_sol);
  // This is required because libMesh incorrectly treats the PETSc parallel vector as a ghost vector
  // We should be able to remove this line of code once libMesh is updated
  VecMPISetGhost(_petsc_sol, 0, nullptr);
  // The solution at the previous time step
  VecDuplicate(_petsc_sol, &_petsc_sol_old);
  // Udot
  VecDuplicate(_petsc_sol, &_petsc_udot);
  // RHS
  VecDuplicate(_petsc_sol, &_petsc_rhs);
  // Form an initial condition
  FormInitialSolution(_ts, _petsc_sol, NULL);
  VecCopy(_petsc_sol, _petsc_sol_old);
  VecSet(_petsc_udot, 0);
}

ExternalPETScProblem::~ExternalPETScProblem()
{
  // Destroy all handles of the external PETSc solver
  VecDestroy(&_petsc_sol);
  VecDestroy(&_petsc_sol_old);
  VecDestroy(&_petsc_udot);
  VecDestroy(&_petsc_rhs);
}

void
ExternalPETScProblem::externalSolve()
{
  _console << "PETSc External Solve!" << std::endl;
  // "_petsc_sol_old" is the solution of the current time step, and "_petsc_sol" will be updated
  // to store the solution of the next time step after this call.
  // This call advances one external time step so that there is an opportunity to
  // exchange information with the MOOSE simulation.
  externalPETScDiffusionFDMSolve(_ts, _petsc_sol_old, _petsc_sol, dt(), time(), &_petsc_converged);
}

// This function is called when the MOOSE time stepper actually moves to the next time step.
// "PostTimeStep" may not be called for certain cases (e.g., auto_advance=false)
void
ExternalPETScProblem::advanceState()
{
  ExternalProblem::advanceState();
  // Compute udot using a backward Euler method.
  // If the external code uses a different time integration method,
  // udot should be retrieved from the external solver instead.
  VecCopy(_petsc_sol, _petsc_udot);
  VecAXPY(_petsc_udot, -1., _petsc_sol_old);
  VecScale(_petsc_udot, 1. / dt());
  // Save the current solution because we are moving to the next time step
  VecCopy(_petsc_sol, _petsc_sol_old);
}

Real
ExternalPETScProblem::computeResidualL2Norm()
{
  TSComputeIFunction(_ts, time(), _petsc_sol, _petsc_udot, _petsc_rhs, PETSC_FALSE);
  PetscReal norm;
  VecNorm(_petsc_rhs, NORM_2, &norm);

  return norm;
}

void
ExternalPETScProblem::syncSolutions(Direction direction)
{
  if (direction == Direction::FROM_EXTERNAL_APP)
  {
    _console << "syncSolutions from external petsc App" << std::endl;
    DM da;
    // xs: first grid point in the x direction on the local processor
    // ys: first grid point in the y direction on the local processor
    // xm: number of grid points in the x direction on the local processor
    // ym: number of grid points in the y direction on the local processor
    // Mx: number of grid points in the x direction across all processors
    PetscInt i, j, xs, ys, xm, ym, Mx;
    PetscScalar ** _petsc_sol_array;
    TSGetDM(_ts, &da);
    DMDAGetInfo(da,
                PETSC_IGNORE,
                &Mx,
                PETSC_IGNORE,
                PETSC_IGNORE,
                PETSC_IGNORE,
                PETSC_IGNORE,
                PETSC_IGNORE,
                PETSC_IGNORE,
                PETSC_IGNORE,
                PETSC_IGNORE,
                PETSC_IGNORE,
                PETSC_IGNORE,
                PETSC_IGNORE);
    DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);
    DMDAVecGetArray(da, _petsc_sol, &_petsc_sol_array);

    // Take the solution from PETSc and sync it to one MOOSE variable.
    // We currently support only one variable, but it is straightforward
    // to extend this to multiple MOOSE variables.
    MeshBase & to_mesh = mesh().getMesh();
    auto & sync_to_var = getVariable(
        0, _sync_to_var_name, Moose::VarKindType::VAR_ANY, Moose::VarFieldType::VAR_FIELD_STANDARD);

    for (j = ys; j < ys + ym; j++)
      for (i = xs; i < xs + xm; i++)
      {
        Node * to_node = to_mesh.node_ptr(i + j * Mx);
        // For the current example, we need to update only one variable.
        // This check makes sure users do not make a mistake in the demo input file.
        // If multiple variables need to be transferred for some use case, users should
        // loop over the variables and copy the necessary data.
        if (to_node->n_comp(sync_to_var.sys().number(), sync_to_var.number()) > 1)
          mooseError("Does not support multiple components");

        dof_id_type dof = to_node->dof_number(sync_to_var.sys().number(), sync_to_var.number(), 0);
        // Copy the solution to the right location
        sync_to_var.sys().solution().set(dof, _petsc_sol_array[j][i]);
      }

    sync_to_var.sys().solution().close();

    DMDAVecRestoreArray(da, _petsc_sol, &_petsc_sol_array);

    // Make the solution and the current local solution consistent
    sync_to_var.sys().update();
  }
  else if (direction == Direction::TO_EXTERNAL_APP)
  {
    _console << "syncSolutions to external petsc App " << std::endl;
    // We could do something similar here to sync the solution back to PETSc
    // (see the sketch after this listing).
  }
}
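
For reference, the quantities formed in advanceState() and computeResidualL2Norm() above amount to the backward Euler relations below; this is a restatement of what the code already does, not an additional interface:

\[
\dot{u}_n \approx \frac{u_n - u_{n-1}}{\Delta t},
\qquad
\texttt{computeResidualL2Norm()} = \bigl\| F(t_n, u_n, \dot{u}_n) \bigr\|_2 ,
\]

where \(F\) is the implicit function registered with the PETSc TS and evaluated by TSComputeIFunction(). As the comment in advanceState() notes, if the external solver uses a different time integrator, \(\dot{u}_n\) should be retrieved from that solver instead.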
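The DMDA-to-mesh mapping used in the sync loop assumes the MOOSE mesh nodes are numbered lexicographically so that they match the structured DMDA grid: for \(M_x\) grid points in the x direction, the node holding grid point \((i, j)\) has id

\[
n = i + j\,M_x .
\]

Each processor visits only its locally owned DMDA patch (the ranges \([x_s, x_s + x_m)\) and \([y_s, y_s + y_m)\)); the close() call on the solution vector then communicates any values set for off-processor degrees of freedom.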
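
The TO_EXTERNAL_APP branch above is intentionally left as a stub. Below is a minimal, hypothetical sketch of what its body could look like; it simply mirrors the FROM_EXTERNAL_APP branch, reusing the same members (_ts, _petsc_sol, _sync_to_var_name) and the same node-id mapping. The direct read of solution()(dof) assumes the degree of freedom is locally accessible (i.e., that the mesh and DMDA partitionings line up); a production version would need ghosting or a gather step.

    // Hypothetical body for the TO_EXTERNAL_APP branch: push the MOOSE variable back
    // into the PETSc DMDA vector. Same assumptions as the FROM_EXTERNAL_APP branch
    // (single-component variable, lexicographic node numbering matching the DMDA).
    DM da;
    PetscInt i, j, xs, ys, xm, ym, Mx;
    PetscScalar ** petsc_sol_array;
    TSGetDM(_ts, &da);
    DMDAGetInfo(da, PETSC_IGNORE, &Mx, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE,
                PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE,
                PETSC_IGNORE);
    DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);
    DMDAVecGetArray(da, _petsc_sol, &petsc_sol_array);

    MeshBase & from_mesh = mesh().getMesh();
    auto & sync_var = getVariable(
        0, _sync_to_var_name, Moose::VarKindType::VAR_ANY, Moose::VarFieldType::VAR_FIELD_STANDARD);

    for (j = ys; j < ys + ym; j++)
      for (i = xs; i < xs + xm; i++)
      {
        Node * from_node = from_mesh.node_ptr(i + j * Mx);
        dof_id_type dof = from_node->dof_number(sync_var.sys().number(), sync_var.number(), 0);
        // Read the nodal value out of the MOOSE solution vector (assumed locally accessible)
        petsc_sol_array[j][i] = sync_var.sys().solution()(dof);
      }

    DMDAVecRestoreArray(da, _petsc_sol, &petsc_sol_array);

Because the PETSc vector is written only on the locally owned DMDA patch, no assembly call is needed in this direction; the sketch deliberately keeps the single-variable, single-component restriction of the demo.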