//* This file is part of the MOOSE framework
//* https://mooseframework.inl.gov
//*
//* All rights reserved, see COPYRIGHT for full restrictions
//* https://github.com/idaholab/moose/blob/master/COPYRIGHT
//*
//* Licensed under LGPL 2.1, please see LICENSE for details
//* https://www.gnu.org/licenses/lgpl-2.1.html

// MOOSE includes
#include "MultiAppConservativeTransfer.h"
#include "MooseTypes.h"
#include "FEProblem.h"
#include "MultiApp.h"
#include "MooseMesh.h"
#include "UserObject.h"
#include "NearestPointIntegralVariablePostprocessor.h"
#include "SystemBase.h"

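// MultiAppConservativeTransfer is the base class for field transfers that can rescale the
// transferred solution so that a quantity computed by a pair of postprocessors (one per side of
// the transfer) is preserved.
//
// Illustrative input-file sketch (the transfer type and postprocessor names below are
// placeholders; any transfer derived from this class accepts these parameters):
//
//   [Transfers]
//     [to_sub]
//       type = MultiAppGeneralFieldNearestLocationTransfer
//       to_multi_app = sub
//       source_variable = power_density
//       variable = power_density
//       from_postprocessors_to_be_preserved = 'total_power'
//       to_postprocessors_to_be_preserved = 'total_power_sub'
//     []
//   []
//
// Both postprocessors must have execute_on = 'transfer'; this is enforced in initialSetup().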
InputParameters
MultiAppConservativeTransfer::validParams()
{
  InputParameters params = MultiAppFieldTransfer::validParams();
  params.addRequiredParam<std::vector<AuxVariableName>>(
      "variable", "The auxiliary variable to store the transferred values in.");
  params.addRequiredParam<std::vector<VariableName>>("source_variable",
                                                     "The variable to transfer from.");

  params.addParam<std::vector<PostprocessorName>>(
      "from_postprocessors_to_be_preserved",
      "The name of the postprocessor in the from-app used to evaluate the adjustment factor.");

  params.addParam<std::vector<PostprocessorName>>(
      "to_postprocessors_to_be_preserved",
      {},
      "The name of the postprocessor in the to-app used to evaluate the adjustment factor.");
  params.addParam<bool>("allow_skipped_adjustment",
                        false,
                        "If set to true, the transfer skips adjustment when from or to "
                        "postprocessor values are either zero or have different signs. If set to "
                        "false, an error is thrown when encountering these conditions.");
  params.addParamNamesToGroup("from_postprocessors_to_be_preserved "
                              "to_postprocessors_to_be_preserved allow_skipped_adjustment",
                              "Conservative transfer");

  return params;
}

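// Read the source/target variable names and, if conservation is requested, check that the numbers
// of from/to postprocessors are consistent with the transfer direction and the number of subapps.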
MultiAppConservativeTransfer::MultiAppConservativeTransfer(const InputParameters & parameters)
  : MultiAppFieldTransfer(parameters),
    _from_var_names(isParamValid("source_variable")
                        ? getParam<std::vector<VariableName>>("source_variable")
                        : std::vector<VariableName>()),
    _to_var_names(getParam<std::vector<AuxVariableName>>("variable")),
    _preserve_transfer(isParamValid("from_postprocessors_to_be_preserved")),
    _from_postprocessors_to_be_preserved(
        _preserve_transfer
            ? getParam<std::vector<PostprocessorName>>("from_postprocessors_to_be_preserved")
            : std::vector<PostprocessorName>{}),
    _to_postprocessors_to_be_preserved(
        getParam<std::vector<PostprocessorName>>("to_postprocessors_to_be_preserved")),
    _use_nearestpoint_pps(false),
    _allow_skipped_adjustment(getParam<bool>("allow_skipped_adjustment"))
{
  if (_directions.size() != 1)
    paramError("direction", "This transfer is only unidirectional");

  if (_preserve_transfer)
  {
    /*
     * Supporting multiple variables is not currently needed, so only the single-variable case is
     * handled when the conservative capability is on.
     */
    if (_to_var_names.size() != 1)
      paramError("variable",
                 "Only a single variable is supported when the conservative capability is on");

    if (_current_direction == TO_MULTIAPP)
    {
      if (_from_postprocessors_to_be_preserved.size() != getToMultiApp()->numGlobalApps() &&
          _from_postprocessors_to_be_preserved.size() != 1)
        paramError("from_postprocessors_to_be_preserved",
                   "The number of from-postprocessors should equal the number of subapps, or use "
                   "a NearestPointIntegralVariablePostprocessor");
      if (_to_postprocessors_to_be_preserved.size() != 1)
        paramError("to_postprocessors_to_be_preserved",
                   "The number of to-postprocessors should equal 1");
    }
    else if (_current_direction == FROM_MULTIAPP)
    {
      if (_from_postprocessors_to_be_preserved.size() != 1)
        paramError("from_postprocessors_to_be_preserved",
                   "The number of from-postprocessors should equal 1");

      if (_to_postprocessors_to_be_preserved.size() != getFromMultiApp()->numGlobalApps() &&
          _to_postprocessors_to_be_preserved.size() != 1)
        paramError("to_postprocessors_to_be_preserved",
                   "The number of to-postprocessors should equal the number of subapps, or use "
                   "a NearestPointIntegralVariablePostprocessor");
    }
  }

  /* At least one to-variable must be specified */
  if (_to_var_names.size() == 0)
    paramError("variable", "You need to specify at least one variable");

  /* Right now, most transfers support only one variable */
  if (_to_var_names.size() == 1)
    _to_var_name = _to_var_names[0];

  if (_from_var_names.size() == 1)
    _from_var_name = _from_var_names[0];
}

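// Determine whether the parent-side postprocessor is a NearestPointIntegralVariablePostprocessor
// (which provides one conserved value per nearest-point region) and verify that every
// postprocessor involved in the conservation is executed on EXEC_TRANSFER.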
void
MultiAppConservativeTransfer::initialSetup()
{
  MultiAppFieldTransfer::initialSetup();
  if (_preserve_transfer)
  {
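    // Detect whether the single parent-side postprocessor is a
    // NearestPointIntegralVariablePostprocessor. If it is not and there are several subapps, one
    // regular postprocessor per subapp is required instead.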
    if (_from_postprocessors_to_be_preserved.size() == 1 && _current_direction == TO_MULTIAPP)
    {
      FEProblemBase & from_problem = getToMultiApp()->problemBase();
      auto * pps = dynamic_cast<const NearestPointIntegralVariablePostprocessor *>(
          &(from_problem.getUserObjectBase(_from_postprocessors_to_be_preserved[0])));
      if (pps)
        _use_nearestpoint_pps = true;
      else
      {
        _use_nearestpoint_pps = false;
        if (getToMultiApp()->numGlobalApps() > 1)
          mooseError("You have to specify ",
                     getToMultiApp()->numGlobalApps(),
                     " regular from-postprocessors, or use a "
                     "NearestPointIntegralVariablePostprocessor");
      }
    }

    if (_to_postprocessors_to_be_preserved.size() == 1 && _current_direction == FROM_MULTIAPP)
    {
      FEProblemBase & to_problem = getFromMultiApp()->problemBase();
      auto * pps = dynamic_cast<const NearestPointIntegralVariablePostprocessor *>(
          &(to_problem.getUserObjectBase(_to_postprocessors_to_be_preserved[0])));
      if (pps)
        _use_nearestpoint_pps = true;
      else
      {
        _use_nearestpoint_pps = false;
        if (getFromMultiApp()->numGlobalApps() > 1)
          mooseError("You have to specify ",
                     getFromMultiApp()->numGlobalApps(),
                     " regular to-postprocessors, or use a "
                     "NearestPointIntegralVariablePostprocessor");
      }
    }

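    // The transfer is unidirectional, so exactly one of the two multiapps exists; use it to reach
    // the parent problem and the subapp problems below.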
    const auto multi_app = hasFromMultiApp() ? getFromMultiApp() : getToMultiApp();

    // Check execute_on here. Users need to specify execute_on='transfer' in their input files for
    // the postprocessors that are used to compute the quantities to conserve in the parent app
    FEProblemBase & parent_problem = multi_app->problemBase();
    std::vector<PostprocessorName> pps_empty;
    // PPs for the parent app
    auto & parent_app_pps =
        _current_direction == TO_MULTIAPP ? pps_empty : _to_postprocessors_to_be_preserved;
    for (auto & pp : parent_app_pps)
    {
      // Get all execute_on options for the parent-app source postprocessor
      auto & execute_on = parent_problem.getUserObjectBase(pp).getExecuteOnEnum();
      const auto & type = parent_problem.getUserObjectBase(pp).type();
      // Check if the parent-app postprocessor has the transfer execute_on flag
      if (!execute_on.isValueSet(EXEC_TRANSFER))
        mooseError(
            "execute_on='transfer' is required in the conservative transfer for " + type + " '",
            pp,
            "' computed in the parent application.\n"
            "Please add execute_on='transfer' to this postprocessor in the input file.\n"
            "For a custom postprocessor, make sure that execute_on options are not hardcoded.");
    }

    // Sub apps
    for (unsigned int i = 0; i < multi_app->numGlobalApps(); i++)
    {
      // If we do not have this app, we skip
      if (!multi_app->hasLocalApp(i))
        continue;
      // Sub problem for this subapp
      FEProblemBase & sub_problem = multi_app->appProblemBase(i);
      // PPs for this subapp
      auto & sub_pps =
          _current_direction == TO_MULTIAPP ? _to_postprocessors_to_be_preserved : pps_empty;
      for (auto & sub_pp : sub_pps)
      {
        // Get all execute_on options for the subapp postprocessor
        auto & execute_on = sub_problem.getUserObjectBase(sub_pp).getExecuteOnEnum();
        const auto & type = sub_problem.getUserObjectBase(sub_pp).type();
        // Check if the subapp postprocessor has the transfer execute_on flag
        if (!execute_on.isValueSet(EXEC_TRANSFER))
          mooseError(
              "execute_on='transfer' is required in the conservative transfer for " + type + " '",
              sub_pp,
              "' in child application '" + multi_app->name() +
                  "'. \n"
                  "Please add execute_on='transfer' to this postprocessor in the input file.\n"
                  "For a custom postprocessor, make sure that execute_on options are not "
                  "hardcoded.");
      }
    }
  }
}

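// Once the field values have been transferred, rescale the receiving solution so that the
// to-postprocessor reproduces the value of the from-postprocessor.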
void
MultiAppConservativeTransfer::postExecute()
{
  if (_preserve_transfer)
  {
    TIME_SECTION("MultiAppConservativeTransfer::execute()",
                 5,
                 "Post transfer to preserve postprocessor values");

    if (_current_direction == TO_MULTIAPP)
    {
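      // Parent app -> subapps: the parent-app from-postprocessor provides the reference value and
      // each local subapp solution is rescaled to match it.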
      FEProblemBase & from_problem = getToMultiApp()->problemBase();
      if (_use_nearestpoint_pps)
        from_problem.computeUserObjectByName(
            EXEC_TRANSFER, Moose::POST_AUX, _from_postprocessors_to_be_preserved[0]);

      for (unsigned int i = 0; i < getToMultiApp()->numGlobalApps(); i++)
        if (getToMultiApp()->hasLocalApp(i))
        {
          if (_use_nearestpoint_pps)
            adjustTransferredSolutionNearestPoint(i,
                                                  &from_problem,
                                                  _from_postprocessors_to_be_preserved[0],
                                                  getToMultiApp()->appProblemBase(i),
                                                  _to_postprocessors_to_be_preserved[0]);
          else
            adjustTransferredSolution(&from_problem,
                                      _from_postprocessors_to_be_preserved[i],
                                      getToMultiApp()->appProblemBase(i),
                                      _to_postprocessors_to_be_preserved[0]);
        }
    }

    else if (_current_direction == FROM_MULTIAPP)
    {
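      // Subapps -> parent app: the from-postprocessor evaluated on the subapps provides the
      // reference value and the parent-app solution is rescaled, region by region when a
      // nearest-point postprocessor is used.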
      FEProblemBase & to_problem = getFromMultiApp()->problemBase();
      if (_use_nearestpoint_pps)
        to_problem.computeUserObjectByName(
            EXEC_TRANSFER, Moose::POST_AUX, _to_postprocessors_to_be_preserved[0]);

      for (unsigned int i = 0; i < getFromMultiApp()->numGlobalApps(); i++)
      {
        if (_use_nearestpoint_pps)
          adjustTransferredSolutionNearestPoint(
              i,
              getFromMultiApp()->hasLocalApp(i) ? &getFromMultiApp()->appProblemBase(i) : nullptr,
              _from_postprocessors_to_be_preserved[0],
              to_problem,
              _to_postprocessors_to_be_preserved[0]);
        else
          adjustTransferredSolution(
              getFromMultiApp()->hasLocalApp(i) ? &getFromMultiApp()->appProblemBase(i) : nullptr,
              _from_postprocessors_to_be_preserved[0],
              to_problem,
              _to_postprocessors_to_be_preserved[i]);
      }

      // Compute the to-postprocessor again so that it has the right value with the updated solution
      if (_use_nearestpoint_pps)
        to_problem.computeUserObjectByName(
            EXEC_TRANSFER, Moose::POST_AUX, _to_postprocessors_to_be_preserved[0]);
    }
  }
}

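// Rescale the target variable so that the conserved quantity matches between the two sides. When
// transferring from the subapps, only the degrees of freedom belonging to nearest-point region i
// are scaled; when transferring to subapp i, its whole solution is scaled by the ratio of the
// i-th nearest-point value to the subapp's to-postprocessor value.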
void
MultiAppConservativeTransfer::adjustTransferredSolutionNearestPoint(
    unsigned int i,
    FEProblemBase * from_problem,
    PostprocessorName & from_postprocessor,
    FEProblemBase & to_problem,
    PostprocessorName & to_postprocessor)
{
  PostprocessorValue from_adjuster = 0;
  if (from_problem && _current_direction == FROM_MULTIAPP)
    from_adjuster = from_problem->getPostprocessorValueByName(from_postprocessor);
  else
    from_adjuster = 0;

  /* Everyone on the parent application side should know this value; use it to scale the solution */
  if (_current_direction == FROM_MULTIAPP)
  {
    /* In this case only one subapp has a nonzero value and the others must be zero, so take the
     * maximum value.
     */
    PostprocessorValue from_adjuster_tmp = from_adjuster;
    comm().max(from_adjuster);

    /* The value may be negative, in which case the maximum over ranks is zero; take the minimum
     * instead */
    if (MooseUtils::absoluteFuzzyLessEqual(from_adjuster, 0.))
    {
      comm().min(from_adjuster_tmp);
      from_adjuster = from_adjuster_tmp;
    }
  }

  PostprocessorValue to_adjuster = 0;
  // Compute the to-postprocessor to obtain the adjuster
  if (_current_direction == TO_MULTIAPP)
  {
    to_problem.computeUserObjectByName(EXEC_TRANSFER, Moose::POST_AUX, to_postprocessor);
    to_adjuster = to_problem.getPostprocessorValueByName(to_postprocessor);
  }

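  // Locate the target variable within its system so its degrees of freedom can be scaled in
  // place, and grab the nearest-point postprocessor that defines the regions and their values.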
  auto & to_var = to_problem.getVariable(
      0, _to_var_name, Moose::VarKindType::VAR_ANY, Moose::VarFieldType::VAR_FIELD_STANDARD);
  auto & to_sys = to_var.sys().system();
  auto var_num = to_sys.variable_number(_to_var_name);
  auto sys_num = to_sys.number();
  auto & pps = static_cast<const NearestPointIntegralVariablePostprocessor &>(
      _current_direction == FROM_MULTIAPP ? (to_problem.getUserObjectBase(to_postprocessor))
                                          : (from_problem->getUserObjectBase(from_postprocessor)));
  auto & to_solution = to_var.sys().solution();
  auto & to_mesh = to_problem.mesh().getMesh();
  bool is_nodal = to_sys.variable_type(var_num).family == LAGRANGE;
  if (is_nodal)
  {
    for (const auto & node : to_mesh.local_node_ptr_range())
    {
      // Skip this node if the variable has no dofs at it.
      if (node->n_dofs(sys_num, var_num) < 1)
        continue;

      Real scale = 1;
      if (_current_direction == FROM_MULTIAPP)
      {
        auto ii = pps.nearestPointIndex(*node);
        if (ii != i || !performAdjustment(from_adjuster, pps.userObjectValue(i)))
          continue;

        scale = from_adjuster / pps.userObjectValue(i);
      }
      else
      {
        if (!performAdjustment(pps.userObjectValue(i), to_adjuster))
          continue;

        scale = pps.userObjectValue(i) / to_adjuster;
      }

      /* Need to scale this node */
      dof_id_type dof = node->dof_number(sys_num, var_num, 0);
      to_solution.set(dof, scale * to_solution(dof));
    }
  }
  else
  {
    for (auto & elem : as_range(to_mesh.local_elements_begin(), to_mesh.local_elements_end()))
    {
      // Skip this element if the variable has no dofs at it.
      if (elem->n_dofs(sys_num, var_num) < 1)
        continue;

      Real scale = 1;
      if (_current_direction == FROM_MULTIAPP)
      {
        unsigned int ii = pps.nearestPointIndex(elem->vertex_average());
        if (ii != i || !performAdjustment(from_adjuster, pps.userObjectValue(i)))
          continue;

        scale = from_adjuster / pps.userObjectValue(i);
      }
      else
      {
        if (!performAdjustment(pps.userObjectValue(i), to_adjuster))
          continue;

        scale = pps.userObjectValue(i) / to_adjuster;
      }

      dof_id_type dof = elem->dof_number(sys_num, var_num, 0);
      to_solution.set(dof, scale * to_solution(dof));
    }
  }

  to_solution.close();
  to_sys.update();

  // Compute the to-postprocessor again so that it has the right value with the updated solution
  if (_current_direction == TO_MULTIAPP)
    to_problem.computeUserObjectByName(EXEC_TRANSFER, Moose::POST_AUX, to_postprocessor);
}

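// Rescale the target variable by the single factor from_adjuster / to_adjuster. If the
// to-postprocessor is block restricted, only degrees of freedom on its blocks are scaled.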
void
MultiAppConservativeTransfer::adjustTransferredSolution(FEProblemBase * from_problem,
                                                        PostprocessorName & from_postprocessor,
                                                        FEProblemBase & to_problem,
                                                        PostprocessorName & to_postprocessor)
{
  PostprocessorValue from_adjuster = 0;
  if (from_problem)
    from_adjuster = from_problem->getPostprocessorValueByName(from_postprocessor);
  else
    from_adjuster = 0;

  /* Everyone on the parent side should know this value; use it to scale the solution */
  if (_current_direction == FROM_MULTIAPP)
  {
    /* In this case only one subapp has a nonzero value and the others must be zero, so take the
     * maximum value.
     */
    PostprocessorValue from_adjuster_tmp = from_adjuster;
    comm().max(from_adjuster);

    /* The value may be negative, in which case the maximum over ranks is zero; take the minimum
     * instead */
    if (MooseUtils::absoluteFuzzyLessEqual(from_adjuster, 0.))
    {
      comm().min(from_adjuster_tmp);
      from_adjuster = from_adjuster_tmp;
    }
  }

  // Compute the to-postprocessor to obtain the adjuster
  to_problem.computeUserObjectByName(EXEC_TRANSFER, Moose::POST_AUX, to_postprocessor);

  // Now we should have the right adjuster based on the transferred solution
  const auto to_adjuster = to_problem.getPostprocessorValueByName(to_postprocessor);

  // Decide whether the adjustment should be performed
  if (!performAdjustment(from_adjuster, to_adjuster))
    return;

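  // If the to-postprocessor is block restricted, its block IDs limit the scaling to the
  // corresponding part of the mesh; otherwise every local degree of freedom is scaled.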
  auto & to_var = to_problem.getVariable(
      0, _to_var_name, Moose::VarKindType::VAR_ANY, Moose::VarFieldType::VAR_FIELD_STANDARD);
  auto & to_sys = to_var.sys().system();
  auto var_num = to_sys.variable_number(_to_var_name);
  auto sys_num = to_sys.number();
  auto * pps =
      dynamic_cast<const BlockRestrictable *>(&(to_problem.getUserObjectBase(to_postprocessor)));
  auto & to_solution = to_var.sys().solution();
  auto & to_mesh = to_problem.mesh().getMesh();
  auto & moose_mesh = to_problem.mesh();
  bool is_nodal = to_sys.variable_type(var_num).family == LAGRANGE;
  if (is_nodal)
  {
    for (const auto & node : to_mesh.local_node_ptr_range())
    {
      // Skip this node if the variable has no dofs at it.
      if (node->n_dofs(sys_num, var_num) < 1)
        continue;

      bool scale_current_node = false;
      /* If we care about block IDs */
      if (pps)
      {
        auto & blockids = pps->blockIDs();
        auto & node_to_elem_map = moose_mesh.nodeToElemMap();
        auto neighbor_elements = node_to_elem_map.find(node->id());
        for (auto element : neighbor_elements->second)
        {
          auto & elem = to_mesh.elem_ref(element);
          if (blockids.find(elem.subdomain_id()) != blockids.end())
          {
            scale_current_node = true;
            break;
          }
        }
      }
      else
      {
        scale_current_node = true;
      }
      /* Need to scale this node */
      if (scale_current_node)
      {
        dof_id_type dof = node->dof_number(sys_num, var_num, 0);
        to_solution.set(dof, (from_adjuster / to_adjuster) * to_solution(dof));
      }
    }
  }
  else
  {
    for (auto & elem : as_range(to_mesh.local_elements_begin(), to_mesh.local_elements_end()))
    {
      // Skip this element if the variable has no dofs at it.
      if (elem->n_dofs(sys_num, var_num) < 1)
        continue;

      bool scale_current_element = false;
      if (pps)
      {
        auto & blockids = pps->blockIDs();
        if (blockids.find(elem->subdomain_id()) != blockids.end())
        {
          scale_current_element = true;
        }
      }
      else
      {
        scale_current_element = true;
      }
      if (scale_current_element)
      {
        unsigned int n_comp = elem->n_comp(sys_num, var_num);

        for (unsigned int offset = 0; offset < n_comp; offset++)
        {
          dof_id_type dof = elem->dof_number(sys_num, var_num, offset);
          to_solution.set(dof, (from_adjuster / to_adjuster) * to_solution(dof));
        }
      }
    }
  }

  to_solution.close();
  to_sys.update();

  // Compute again so that the post-processor has the value with the updated solution
  to_problem.computeUserObjectByName(EXEC_TRANSFER, Moose::POST_AUX, to_postprocessor);
}

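// The adjustment is well defined only when both postprocessor values are nonzero and of the same
// sign; otherwise it is either skipped (allow_skipped_adjustment = true) or an error is raised.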
bool
MultiAppConservativeTransfer::performAdjustment(const PostprocessorValue & from,
                                                const PostprocessorValue & to) const
{
  if (from * to > 0)
    return true;
  else if (_allow_skipped_adjustment)
    return false;
  else
    mooseError("Adjustment postprocessors from: ",
               from,
               " to: ",
               to,
               " must both be nonzero and have the same sign");
}