//* This file is part of the MOOSE framework
//* https://mooseframework.inl.gov
//*
//* All rights reserved, see COPYRIGHT for full restrictions
//* https://github.com/idaholab/moose/blob/master/COPYRIGHT
//*
//* Licensed under LGPL 2.1, please see LICENSE for details
//* https://www.gnu.org/licenses/lgpl-2.1.html

#include "MultiAppPostprocessorTransfer.h"

// MOOSE includes
#include "MooseTypes.h"
#include "FEProblem.h"
#include "MultiApp.h"

// libMesh
#include "libmesh/meshfree_interpolation.h"
#include "libmesh/system.h"

registerMooseObject("MooseApp", MultiAppPostprocessorTransfer);

InputParameters
MultiAppPostprocessorTransfer::validParams()
{
  InputParameters params = MultiAppTransfer::validParams();
  params.addClassDescription(
      "Transfers postprocessor data between the master application and sub-application(s).");
  params.addRequiredParam<PostprocessorName>(
      "from_postprocessor",
      "The name of the Postprocessor in the Master to transfer the value from.");
  params.addRequiredParam<PostprocessorName>(
      "to_postprocessor",
      "The name of the Postprocessor in the MultiApp to transfer the value to. "
      "This should most likely be a Reporter Postprocessor.");
  MooseEnum reduction_type("average sum maximum minimum");
  params.addParam<MooseEnum>("reduction_type",
                             reduction_type,
                             "The type of reduction to perform to reduce postprocessor "
                             "values from multiple SubApps to a single value");
  MultiAppTransfer::addUserObjectExecutionCheckParam(params);

  return params;
}

MultiAppPostprocessorTransfer::MultiAppPostprocessorTransfer(const InputParameters & parameters)
  : MultiAppTransfer(parameters),
    _from_pp_name(getParam<PostprocessorName>("from_postprocessor")),
    _to_pp_name(getParam<PostprocessorName>("to_postprocessor")),
    _reduction_type(getParam<MooseEnum>("reduction_type"))
{
  if (_directions.size() != 1)
    paramError("direction", "This transfer is only unidirectional");

  if (_current_direction == FROM_MULTIAPP)
    if (!_reduction_type.isValid())
      mooseError("In MultiAppPostprocessorTransfer, must specify 'reduction_type' if direction = "
                 "from_multiapp");

  if (isParamValid("to_multi_app") && isParamValid("from_multi_app") &&
      isParamValid("reduction_type"))
    mooseError("Reductions are not supported for multiapp sibling transfers");
}

void
MultiAppPostprocessorTransfer::execute()
{
  TIME_SECTION("MultiAppPostprocessorTransfer::execute()", 5, "Transferring a postprocessor");

  // Execute the postprocessor if it was specified to execute on TRANSFER
  switch (_current_direction)
  {
    case TO_MULTIAPP:
    {
      checkParentAppUserObjectExecuteOn(_from_pp_name);
      _fe_problem.computeUserObjectByName(EXEC_TRANSFER, Moose::PRE_AUX, _from_pp_name);
      _fe_problem.computeUserObjectByName(EXEC_TRANSFER, Moose::POST_AUX, _from_pp_name);
      break;
    }
    case FROM_MULTIAPP:
    case BETWEEN_MULTIAPP:
      errorIfObjectExecutesOnTransferInSourceApp(_from_pp_name);
  }

  switch (_current_direction)
  {
    case BETWEEN_MULTIAPP:
      for (unsigned int i = 0; i < getFromMultiApp()->numGlobalApps(); i++)
      {
        // Get source postprocessor value
        Real pp_value = std::numeric_limits<Real>::max();
        if (getFromMultiApp()->hasLocalApp(i))
        {
          FEProblemBase & from_problem = getFromMultiApp()->appProblemBase(i);
          pp_value = from_problem.getPostprocessorValueByName(_from_pp_name);
        }

        // Find the postprocessor value from another process
        if (getFromMultiApp()->numGlobalApps() == 1)
          _communicator.min(pp_value);
        else
          mooseAssert(pp_value != std::numeric_limits<Real>::max() ||
                          !getToMultiApp()->hasLocalApp(i),
                      "Source and target app parallel distribution must be the same");

        // Case 1: a single source app, multiple target apps
        // All target apps must be local
        if (getFromMultiApp()->numGlobalApps() == 1)
          for (const auto j : make_range(getToMultiApp()->numGlobalApps()))
          {
            if (getToMultiApp()->hasLocalApp(j))
              getToMultiApp()->appProblemBase(j).setPostprocessorValueByName(_to_pp_name, pp_value);
          }

        // Case 2: same number of source and target apps
        // The allocation of the child apps on the processors must be the same
        else if (getToMultiApp()->hasLocalApp(i))
          getToMultiApp()->appProblemBase(i).setPostprocessorValueByName(_to_pp_name, pp_value);
      }
      break;
    case TO_MULTIAPP:
    {
      FEProblemBase & from_problem = getToMultiApp()->problemBase();

      const Real & pp_value = from_problem.getPostprocessorValueByName(_from_pp_name);

      for (unsigned int i = 0; i < getToMultiApp()->numGlobalApps(); i++)
        if (getToMultiApp()->hasLocalApp(i))
          getToMultiApp()->appProblemBase(i).setPostprocessorValueByName(_to_pp_name, pp_value);
      break;
    }
    case FROM_MULTIAPP:
    {
      FEProblemBase & to_problem = getFromMultiApp()->problemBase();

      Real reduced_pp_value;
      switch (_reduction_type)
      {
        case AVERAGE:
        case SUM:
          reduced_pp_value = 0;
          break;
        case MAXIMUM:
          reduced_pp_value = -std::numeric_limits<Real>::max();
          break;
        case MINIMUM:
          reduced_pp_value = std::numeric_limits<Real>::max();
          break;
        default:
          mooseError(
              "Can't get here unless someone adds a new enum and fails to add it to this switch");
      }
      const auto multi_app = hasFromMultiApp() ? getFromMultiApp() : getToMultiApp();

      for (unsigned int i = 0; i < multi_app->numGlobalApps(); i++)
      {
        if (multi_app->hasLocalApp(i) && multi_app->isRootProcessor())
        {
          const Real & curr_pp_value =
              multi_app->appProblemBase(i).getPostprocessorValueByName(_from_pp_name);
          switch (_reduction_type)
          {
            case AVERAGE:
            case SUM:
              reduced_pp_value += curr_pp_value;
              break;
            case MAXIMUM:
              reduced_pp_value = std::max(curr_pp_value, reduced_pp_value);
              break;
            case MINIMUM:
              reduced_pp_value = std::min(curr_pp_value, reduced_pp_value);
              break;
            default:
              mooseError("Can't get here unless someone adds a new enum and fails to add it to "
                         "this switch");
          }
        }
      }

      switch (_reduction_type)
      {
        case AVERAGE:
          _communicator.sum(reduced_pp_value);
          reduced_pp_value /= static_cast<Real>(multi_app->numGlobalApps());
          break;
        case SUM:
          _communicator.sum(reduced_pp_value);
          break;
        case MAXIMUM:
          _communicator.max(reduced_pp_value);
          break;
        case MINIMUM:
          _communicator.min(reduced_pp_value);
          break;
        default:
          mooseError(
              "Can't get here unless someone adds a new enum and fails to add it to this switch");
      }

      to_problem.setPostprocessorValueByName(_to_pp_name, reduced_pp_value);
      break;
    }
  }
}

void
MultiAppPostprocessorTransfer::checkSiblingsTransferSupported() const
{
  // Check that we are in one of the supported configurations
  // Case 2: same number of source and target apps
  // The allocation of the child apps on the processors must be the same
  if (getFromMultiApp()->numGlobalApps() == getToMultiApp()->numGlobalApps())
  {
    for (const auto i : make_range(getToMultiApp()->numGlobalApps()))
      if (getFromMultiApp()->hasLocalApp(i) + getToMultiApp()->hasLocalApp(i) == 1)
        mooseError("Child application allocation on parallel processes must be the same to support "
                   "siblings postprocessor transfer");
  }
  // Unsupported: we don't know how to choose a postprocessor value.
  // We could accept 'any' value as good enough in the future, but that would not be reproducible
  // in parallel. Also, not every process will necessarily have a 'source' value.
  else if (getFromMultiApp()->numGlobalApps() != 1)
    mooseError("Number of source and target child apps must either match or only a single source "
               "app may be used");
}
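
// Example input-file usage (a minimal sketch for illustration): the block below shows how the
// parameters declared in validParams() above might be set in a MOOSE input file. The MultiApp
// name "sub" and the postprocessor names "source_pp" and "target_pp" are hypothetical
// placeholders; 'reduction_type' is only required for the from_multiapp direction, as enforced
// in the constructor.
//
//   [Transfers]
//     [pp_transfer]
//       type = MultiAppPostprocessorTransfer
//       to_multi_app = sub
//       from_postprocessor = source_pp
//       to_postprocessor = target_pp
//     []
//   []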