Line data Source code
1 : //* This file is part of the MOOSE framework
2 : //* https://mooseframework.inl.gov
3 : //*
4 : //* All rights reserved, see COPYRIGHT for full restrictions
5 : //* https://github.com/idaholab/moose/blob/master/COPYRIGHT
6 : //*
7 : //* Licensed under LGPL 2.1, please see LICENSE for details
8 : //* https://www.gnu.org/licenses/lgpl-2.1.html
9 :
10 : // MOOSE includes
11 : #include "MultiAppConservativeTransfer.h"
12 : #include "MooseTypes.h"
13 : #include "FEProblem.h"
14 : #include "MultiApp.h"
15 : #include "MooseMesh.h"
16 : #include "UserObject.h"
17 : #include "NearestPointIntegralVariablePostprocessor.h"
18 : #include "SystemBase.h"
19 :
InputParameters
MultiAppConservativeTransfer::validParams()
{
  InputParameters params = MultiAppFieldTransfer::validParams();
  params.addRequiredParam<std::vector<AuxVariableName>>(
      "variable", "The auxiliary variable to store the transferred values in.");
  params.addRequiredParam<std::vector<VariableName>>("source_variable",
                                                     "The variable to transfer from.");

  // Optional conservative-transfer machinery: when "from_postprocessors_to_be_preserved"
  // is given, the transferred field is rescaled so that an integral quantity (evaluated by
  // these postprocessors) is preserved across the transfer.
  // NOTE: "from_postprocessors_to_be_preserved" intentionally has no default — its presence
  // is what enables conservation (see _preserve_transfer in the constructor).
  params.addParam<std::vector<PostprocessorName>>(
      "from_postprocessors_to_be_preserved",
      "The name of the Postprocessor in the from-app to evaluate an adjusting factor.");

  params.addParam<std::vector<PostprocessorName>>(
      "to_postprocessors_to_be_preserved",
      {},
      "The name of the Postprocessor in the to-app to evaluate an adjusting factor.");
  params.addParam<bool>("allow_skipped_adjustment",
                        false,
                        "If set to true, the transfer skips adjustment when from or to "
                        "postprocessor values are either zero or have different signs. If set to "
                        "false, an error is thrown when encountering these conditions.");
  params.addParamNamesToGroup("from_postprocessors_to_be_preserved "
                              "to_postprocessors_to_be_preserved allow_skipped_adjustment",
                              "Conservative transfer");

  return params;
}
48 :
49 9176 : MultiAppConservativeTransfer::MultiAppConservativeTransfer(const InputParameters & parameters)
50 : : MultiAppFieldTransfer(parameters),
51 18352 : _from_var_names(isParamValid("source_variable")
52 9176 : ? getParam<std::vector<VariableName>>("source_variable")
53 : : std::vector<VariableName>()),
54 9176 : _to_var_names(getParam<std::vector<AuxVariableName>>("variable")),
55 9176 : _preserve_transfer(isParamValid("from_postprocessors_to_be_preserved")),
56 9176 : _from_postprocessors_to_be_preserved(
57 9176 : _preserve_transfer
58 9176 : ? getParam<std::vector<PostprocessorName>>("from_postprocessors_to_be_preserved")
59 : : std::vector<PostprocessorName>{}),
60 9176 : _to_postprocessors_to_be_preserved(
61 : getParam<std::vector<PostprocessorName>>("to_postprocessors_to_be_preserved")),
62 9176 : _use_nearestpoint_pps(false),
63 27528 : _allow_skipped_adjustment(getParam<bool>("allow_skipped_adjustment"))
64 : {
65 9176 : if (_directions.size() != 1)
66 0 : paramError("direction", "This transfer is only unidirectional");
67 :
68 9176 : if (_preserve_transfer)
69 : {
70 : /*
71 : * Not sure how important to support multi variables
72 : * Let us handle the single variable case only right now if the conservative capability is on
73 : */
74 212 : if (_to_var_names.size() != 1)
75 0 : paramError("variable",
76 : " Support single variable only when the conservative capability is on ");
77 :
78 212 : if (_current_direction == TO_MULTIAPP)
79 : {
80 133 : if (_from_postprocessors_to_be_preserved.size() != getToMultiApp()->numGlobalApps() &&
81 30 : _from_postprocessors_to_be_preserved.size() != 1)
82 0 : paramError("from_postprocessors_to_be_preserved",
83 : "Number of from-postprocessors should equal to the number of subapps, or use "
84 : "NearestPointIntegralVariablePostprocessor");
85 103 : if (_to_postprocessors_to_be_preserved.size() != 1)
86 0 : paramError("to_postprocessors_to_be_preserved",
87 : "Number of to-postprocessors should equal to 1");
88 : }
89 109 : else if (_current_direction == FROM_MULTIAPP)
90 : {
91 109 : if (_from_postprocessors_to_be_preserved.size() != 1)
92 0 : paramError("from_postprocessors_to_be_preserved",
93 : "Number of from Postprocessors should equal to 1");
94 :
95 191 : if (_to_postprocessors_to_be_preserved.size() != getFromMultiApp()->numGlobalApps() &&
96 82 : _to_postprocessors_to_be_preserved.size() != 1)
97 0 : paramError("to_postprocessors_to_be_preserved",
98 : "_to_postprocessors_to_be_preserved",
99 : "Number of to Postprocessors should equal to the number of subapps, or use "
100 : "NearestPointIntegralVariablePostprocessor ");
101 : }
102 : }
103 :
104 : /* Have to specify at least one to-variable */
105 9176 : if (_to_var_names.size() == 0)
106 0 : paramError("variable", "You need to specify at least one variable");
107 :
108 : /* Right now, most of transfers support one variable only */
109 9176 : if (_to_var_names.size() == 1)
110 8952 : _to_var_name = _to_var_names[0];
111 :
112 9176 : if (_from_var_names.size() == 1)
113 7241 : _from_var_name = _from_var_names[0];
114 9176 : }
115 :
// Resolve whether the conservative adjustment uses a single
// NearestPointIntegralVariablePostprocessor (one UO providing per-subapp values) or one regular
// postprocessor per subapp, and verify that every involved postprocessor executes on 'transfer'.
void
MultiAppConservativeTransfer::initialSetup()
{
  MultiAppFieldTransfer::initialSetup();
  if (_preserve_transfer)
  {
    // A single from-postprocessor for TO_MULTIAPP is legal only if it is a nearest-point UO
    // (which holds one value per sub-app), or if there is just one sub-app
    if (_from_postprocessors_to_be_preserved.size() == 1 && _current_direction == TO_MULTIAPP)
    {
      FEProblemBase & from_problem = getToMultiApp()->problemBase();
      // dynamic_cast probe: a null result means this is a regular postprocessor
      auto * pps = dynamic_cast<const NearestPointIntegralVariablePostprocessor *>(
          &(from_problem.getUserObjectBase(_from_postprocessors_to_be_preserved[0])));
      if (pps)
        _use_nearestpoint_pps = true;
      else
      {
        _use_nearestpoint_pps = false;
        if (getToMultiApp()->numGlobalApps() > 1)
          mooseError(
              " You have to specify ",
              getToMultiApp()->numGlobalApps(),
              " regular from-postprocessors, or use NearestPointIntegralVariablePostprocessor ");
      }
    }

    // Mirror of the check above for the FROM_MULTIAPP direction (to-postprocessor side)
    if (_to_postprocessors_to_be_preserved.size() == 1 && _current_direction == FROM_MULTIAPP)
    {
      FEProblemBase & to_problem = getFromMultiApp()->problemBase();
      auto * pps = dynamic_cast<const NearestPointIntegralVariablePostprocessor *>(
          &(to_problem.getUserObjectBase(_to_postprocessors_to_be_preserved[0])));
      if (pps)
        _use_nearestpoint_pps = true;
      else
      {
        _use_nearestpoint_pps = false;
        if (getFromMultiApp()->numGlobalApps() > 1)
          mooseError(
              " You have to specify ",
              getFromMultiApp()->numGlobalApps(),
              " regular to-postprocessors, or use NearestPointIntegralVariablePostprocessor ");
      }
    }

    // The transfer is unidirectional (checked in the constructor), so exactly one of these holds
    const auto multi_app = hasFromMultiApp() ? getFromMultiApp() : getToMultiApp();

    // Let us check execute_on here. Users need to specify execute_on='transfer' in their input
    // files for the postprocessors that are used to compute the quantities to conserve in the
    // Parent app
    FEProblemBase & parent_problem = multi_app->problemBase();
    std::vector<PostprocessorName> pps_empty;
    // PPs for parent app
    // For TO_MULTIAPP the parent-side pps are the *from* ones, which are evaluated via
    // computeUserObjectByName in postExecute, so only the FROM_MULTIAPP to-pps need the check here
    auto & parent_app_pps =
        _current_direction == TO_MULTIAPP ? pps_empty : _to_postprocessors_to_be_preserved;
    for (auto & pp : parent_app_pps)
    {
      // Get out all execute_on options for parent app source pp
      auto & execute_on = parent_problem.getUserObjectBase(pp).getExecuteOnEnum();
      const auto & type = parent_problem.getUserObjectBase(pp).type();
      // Check if parent app has transfer execute_on
      if (!execute_on.isValueSet(EXEC_TRANSFER))
        mooseError(
            "execute_on='transfer' is required in the conservative transfer for " + type + " '",
            pp,
            "' computed in the parent application.\n"
            "Please add execute_on='transfer' to this postprocessor in the input file.\n"
            "For a custom postprocessor, make sure that execute_on options are not hardcoded.");
    }

    // Sub apps
    for (unsigned int i = 0; i < multi_app->numGlobalApps(); i++)
    {
      // If we do not have this app, we skip
      if (!multi_app->hasLocalApp(i))
        continue;
      // Sub problem for
      FEProblemBase & sub_problem = multi_app->appProblemBase(i);
      // PPs for this subapp
      auto & sub_pps =
          _current_direction == TO_MULTIAPP ? _to_postprocessors_to_be_preserved : pps_empty;
      for (auto & sub_pp : sub_pps)
      {
        // Get out of all execute_on options for sub pp
        auto & execute_on = sub_problem.getUserObjectBase(sub_pp).getExecuteOnEnum();
        const auto & type = sub_problem.getUserObjectBase(sub_pp).type();
        // Check if sub pp has transfer execute_on
        if (!execute_on.isValueSet(EXEC_TRANSFER))
          mooseError(
              "execute_on='transfer' is required in the conservative transfer for " + type + " '",
              sub_pp,
              "' in child application '" + multi_app->name() +
                  "'. \n"
                  "Please add execute_on='transfer' to this postprocessor in the input file.\n"
                  "For a custom postprocessor, make sure that execute_on options are not "
                  "hardcoded.");
      }
    }
  }
}
213 :
// After the field values have been transferred, rescale the destination solution(s) so the
// quantity measured by the from-/to-postprocessors is conserved. Dispatches to the
// nearest-point or regular adjustment routine for each (local) sub-app.
void
MultiAppConservativeTransfer::postExecute()
{
  if (_preserve_transfer)
  {
    // NOTE(review): label says "execute()" although this is postExecute() — looks like a
    // copy-paste slip; left unchanged since timer section names may be referenced externally
    TIME_SECTION("MultiAppConservativeTransfer::execute()",
                 5,
                 "Post transfer to preserve postprocessor values");

    if (_current_direction == TO_MULTIAPP)
    {
      FEProblemBase & from_problem = getToMultiApp()->problemBase();
      // Evaluate the (shared) nearest-point UO once on the parent before the per-app loop
      if (_use_nearestpoint_pps)
        from_problem.computeUserObjectByName(
            EXEC_TRANSFER, Moose::POST_AUX, _from_postprocessors_to_be_preserved[0]);

      for (unsigned int i = 0; i < getToMultiApp()->numGlobalApps(); i++)
        if (getToMultiApp()->hasLocalApp(i))
        {
          if (_use_nearestpoint_pps)
            adjustTransferredSolutionNearestPoint(i,
                                                  &from_problem,
                                                  _from_postprocessors_to_be_preserved[0],
                                                  getToMultiApp()->appProblemBase(i),
                                                  _to_postprocessors_to_be_preserved[0]);
          else
            // one from-postprocessor per sub-app (size validated in the constructor)
            adjustTransferredSolution(&from_problem,
                                      _from_postprocessors_to_be_preserved[i],
                                      getToMultiApp()->appProblemBase(i),
                                      _to_postprocessors_to_be_preserved[0]);
        }
    }

    else if (_current_direction == FROM_MULTIAPP)
    {
      FEProblemBase & to_problem = getFromMultiApp()->problemBase();
      if (_use_nearestpoint_pps)
        to_problem.computeUserObjectByName(
            EXEC_TRANSFER, Moose::POST_AUX, _to_postprocessors_to_be_preserved[0]);

      for (unsigned int i = 0; i < getFromMultiApp()->numGlobalApps(); i++)
      {
        // Pass nullptr for sub-apps not local to this rank; the adjustment routines
        // handle the parallel reduction of the from-value internally
        if (_use_nearestpoint_pps)
          adjustTransferredSolutionNearestPoint(
              i,
              getFromMultiApp()->hasLocalApp(i) ? &getFromMultiApp()->appProblemBase(i) : nullptr,
              _from_postprocessors_to_be_preserved[0],
              to_problem,
              _to_postprocessors_to_be_preserved[0]);
        else
          adjustTransferredSolution(
              getFromMultiApp()->hasLocalApp(i) ? &getFromMultiApp()->appProblemBase(i) : nullptr,
              _from_postprocessors_to_be_preserved[0],
              to_problem,
              _to_postprocessors_to_be_preserved[i]);
      }

      // Compute the to-postprocessor again so that it has the right value with the updated solution
      if (_use_nearestpoint_pps)
        to_problem.computeUserObjectByName(
            EXEC_TRANSFER, Moose::POST_AUX, _to_postprocessors_to_be_preserved[0]);
    }
  }
}
278 :
// Scale the destination variable's dofs belonging to nearest-point region i so that the
// integral measured by the NearestPointIntegralVariablePostprocessor matches the source value.
// @param i                  index of the sub-app / nearest-point region being adjusted
// @param from_problem       source problem (may be nullptr on ranks that don't own sub-app i)
// @param from_postprocessor name of the source-side postprocessor
// @param to_problem         destination problem whose solution is modified
// @param to_postprocessor   name of the destination-side postprocessor
void
MultiAppConservativeTransfer::adjustTransferredSolutionNearestPoint(
    unsigned int i,
    FEProblemBase * from_problem,
    PostprocessorName & from_postprocessor,
    FEProblemBase & to_problem,
    PostprocessorName & to_postprocessor)
{
  PostprocessorValue from_adjuster = 0;
  if (from_problem && _current_direction == FROM_MULTIAPP)
    from_adjuster = from_problem->getPostprocessorValueByName(from_postprocessor);
  else
    from_adjuster = 0;

  /* Everyone on the parent application side should know this value; use it to scale the solution */
  if (_current_direction == FROM_MULTIAPP)
  {
    /* In this case, only one subapp has value, and other subapps' must be zero.
     * We should see the maximum value.
     */
    PostprocessorValue from_adjuster_tmp = from_adjuster;
    comm().max(from_adjuster);

    /* We may have a negative value */
    // If the max reduction yielded ~0, the true (negative) value is recovered via min
    if (MooseUtils::absoluteFuzzyLessEqual(from_adjuster, 0.))
    {
      comm().min(from_adjuster_tmp);
      from_adjuster = from_adjuster_tmp;
    }
  }

  PostprocessorValue to_adjuster = 0;
  // Compute to-postprocessor to have the adjuster
  if (_current_direction == TO_MULTIAPP)
  {
    to_problem.computeUserObjectByName(EXEC_TRANSFER, Moose::POST_AUX, to_postprocessor);
    to_adjuster = to_problem.getPostprocessorValueByName(to_postprocessor);
  }

  auto & to_var = to_problem.getVariable(
      0, _to_var_name, Moose::VarKindType::VAR_ANY, Moose::VarFieldType::VAR_FIELD_STANDARD);
  auto & to_sys = to_var.sys().system();
  auto var_num = to_sys.variable_number(_to_var_name);
  auto sys_num = to_sys.number();
  // The nearest-point UO lives on whichever side holds one value per sub-app; the cast is
  // safe because initialSetup() verified the type when _use_nearestpoint_pps was set
  auto & pps = static_cast<const NearestPointIntegralVariablePostprocessor &>(
      _current_direction == FROM_MULTIAPP ? (to_problem.getUserObjectBase(to_postprocessor))
                                          : (from_problem->getUserObjectBase(from_postprocessor)));
  auto & to_solution = to_var.sys().solution();
  auto & to_mesh = to_problem.mesh().getMesh();
  // Lagrange variables carry dofs on nodes; everything else is handled element-wise
  bool is_nodal = to_sys.variable_type(var_num).family == LAGRANGE;
  if (is_nodal)
  {
    for (const auto & node : to_mesh.local_node_ptr_range())
    {
      // Skip this node if the variable has no dofs at it.
      if (node->n_dofs(sys_num, var_num) < 1)
        continue;

      Real scale = 1;
      if (_current_direction == FROM_MULTIAPP)
      {
        // Only touch nodes whose nearest point is region i; skip (or error, depending on
        // allow_skipped_adjustment) when the ratio is ill-defined
        auto ii = pps.nearestPointIndex(*node);
        if (ii != i || !performAdjustment(from_adjuster, pps.userObjectValue(i)))
          continue;

        scale = from_adjuster / pps.userObjectValue(i);
      }
      else
      {
        if (!performAdjustment(pps.userObjectValue(i), to_adjuster))
          continue;

        scale = pps.userObjectValue(i) / to_adjuster;
      }

      /* Need to scale this node */
      dof_id_type dof = node->dof_number(sys_num, var_num, 0);
      to_solution.set(dof, scale * to_solution(dof));
    }
  }
  else
  {
    for (auto & elem : as_range(to_mesh.local_elements_begin(), to_mesh.local_elements_end()))
    {
      // Skip this element if the variable has no dofs at it.
      if (elem->n_dofs(sys_num, var_num) < 1)
        continue;

      Real scale = 1;
      if (_current_direction == FROM_MULTIAPP)
      {
        // Classify the element by its centroid (vertex average)
        unsigned int ii = pps.nearestPointIndex(elem->vertex_average());
        if (ii != i || !performAdjustment(from_adjuster, pps.userObjectValue(i)))
          continue;

        scale = from_adjuster / pps.userObjectValue(i);
      }
      else
      {
        if (!performAdjustment(pps.userObjectValue(i), to_adjuster))
          continue;

        scale = pps.userObjectValue(i) / to_adjuster;
      }

      // NOTE(review): only dof component 0 is scaled here, unlike adjustTransferredSolution
      // which loops over all n_comp components — presumably constant monomial is assumed
      dof_id_type dof = elem->dof_number(sys_num, var_num, 0);
      to_solution.set(dof, scale * to_solution(dof));
    }
  }

  to_solution.close();
  to_sys.update();

  // Compute the to-postprocessor again so that it has the right value with the updated solution
  if (_current_direction == TO_MULTIAPP)
    to_problem.computeUserObjectByName(EXEC_TRANSFER, Moose::POST_AUX, to_postprocessor);
}
396 :
// Scale the destination variable by the ratio from_adjuster/to_adjuster so that the
// quantity measured by the two (regular) postprocessors is conserved. If the to-postprocessor
// is block-restricted, only dofs on its blocks are scaled.
// @param from_problem       source problem (nullptr on ranks without the source sub-app)
// @param from_postprocessor name of the source-side postprocessor
// @param to_problem         destination problem whose solution is modified
// @param to_postprocessor   name of the destination-side postprocessor
void
MultiAppConservativeTransfer::adjustTransferredSolution(FEProblemBase * from_problem,
                                                        PostprocessorName & from_postprocessor,
                                                        FEProblemBase & to_problem,
                                                        PostprocessorName & to_postprocessor)
{
  PostprocessorValue from_adjuster = 0;
  if (from_problem)
    from_adjuster = from_problem->getPostprocessorValueByName(from_postprocessor);
  else
    from_adjuster = 0;

  /* Everyone on the parent side should know this value; use it to scale the solution */
  if (_current_direction == FROM_MULTIAPP)
  {
    /* In this case, only one subapp has value, and other subapps' must be zero.
     * We should see the maximum value.
     */
    PostprocessorValue from_adjuster_tmp = from_adjuster;
    comm().max(from_adjuster);

    /* We may have a negative value, and let us try it again */
    if (MooseUtils::absoluteFuzzyLessEqual(from_adjuster, 0.))
    {
      comm().min(from_adjuster_tmp);
      from_adjuster = from_adjuster_tmp;
    }
  }

  // Compute to-postprocessor to have the adjuster
  to_problem.computeUserObjectByName(EXEC_TRANSFER, Moose::POST_AUX, to_postprocessor);

  // Now we should have the right adjuster based on the transferred solution
  const auto to_adjuster = to_problem.getPostprocessorValueByName(to_postprocessor);

  // decide if the adjustment should be performed
  if (!performAdjustment(from_adjuster, to_adjuster))
    return;

  auto & to_var = to_problem.getVariable(
      0, _to_var_name, Moose::VarKindType::VAR_ANY, Moose::VarFieldType::VAR_FIELD_STANDARD);
  auto & to_sys = to_var.sys().system();
  auto var_num = to_sys.variable_number(_to_var_name);
  auto sys_num = to_sys.number();
  // Probe for block restriction: a null pps means "scale everywhere"
  auto * pps =
      dynamic_cast<const BlockRestrictable *>(&(to_problem.getUserObjectBase(to_postprocessor)));
  auto & to_solution = to_var.sys().solution();
  auto & to_mesh = to_problem.mesh().getMesh();
  auto & moose_mesh = to_problem.mesh();
  // Lagrange variables carry dofs on nodes; everything else is handled element-wise
  bool is_nodal = to_sys.variable_type(var_num).family == LAGRANGE;
  if (is_nodal)
  {
    for (const auto & node : to_mesh.local_node_ptr_range())
    {
      // Skip this node if the variable has no dofs at it.
      if (node->n_dofs(sys_num, var_num) < 1)
        continue;

      bool scale_current_node = false;
      /* If we care about block IDs */
      if (pps)
      {
        // A node counts as "on the blocks" if any neighboring element is on them
        auto & blockids = pps->blockIDs();
        auto & node_to_elem_map = moose_mesh.nodeToElemMap();
        auto neighbor_elements = node_to_elem_map.find(node->id());
        for (auto element : neighbor_elements->second)
        {
          auto & elem = to_mesh.elem_ref(element);
          if (blockids.find(elem.subdomain_id()) != blockids.end())
          {
            scale_current_node = true;
            break;
          }
        }
      }
      else
      {
        scale_current_node = true;
      }
      /* Need to scale this node */
      if (scale_current_node)
      {
        dof_id_type dof = node->dof_number(sys_num, var_num, 0);
        to_solution.set(dof, (from_adjuster / to_adjuster) * to_solution(dof));
      }
    }
  }
  else
  {
    for (auto & elem : as_range(to_mesh.local_elements_begin(), to_mesh.local_elements_end()))
    {
      // Skip this element if the variable has no dofs at it.
      if (elem->n_dofs(sys_num, var_num) < 1)
        continue;

      bool scale_current_element = false;
      if (pps)
      {
        auto & blockids = pps->blockIDs();
        if (blockids.find(elem->subdomain_id()) != blockids.end())
        {
          scale_current_element = true;
        }
      }
      else
      {
        scale_current_element = true;
      }
      if (scale_current_element)
      {
        // Scale every dof component of this elemental variable
        unsigned int n_comp = elem->n_comp(sys_num, var_num);

        for (unsigned int offset = 0; offset < n_comp; offset++)
        {
          dof_id_type dof = elem->dof_number(sys_num, var_num, offset);
          to_solution.set(dof, (from_adjuster / to_adjuster) * to_solution(dof));
        }
      }
    }
  }

  to_solution.close();
  to_sys.update();

  // Compute again so that the post-processor has the value with the updated solution
  to_problem.computeUserObjectByName(EXEC_TRANSFER, Moose::POST_AUX, to_postprocessor);
}
524 :
525 : bool
526 348274 : MultiAppConservativeTransfer::performAdjustment(const PostprocessorValue & from,
527 : const PostprocessorValue & to) const
528 : {
529 348274 : if (from * to > 0)
530 348262 : return true;
531 12 : else if (_allow_skipped_adjustment)
532 12 : return false;
533 : else
534 0 : mooseError("Adjustment postprocessors from: ",
535 : from,
536 : " to: ",
537 : to,
538 : " must both have the same sign and be different from 0");
539 : }
|