LCOV - code coverage report
Current view: top level - src/reduced_basis - rb_construction_base.C (source / functions) Hit Total Coverage
Test: libMesh/libmesh: #4229 (6a9aeb) with base 727f46 Lines: 201 294 68.4 %
Date: 2025-08-19 19:27:09 Functions: 32 70 45.7 %
Legend: Lines: hit not hit

          Line data    Source code
       1             : // rbOOmit: An implementation of the Certified Reduced Basis method.
       2             : // Copyright (C) 2009, 2010 David J. Knezevic
       3             : 
       4             : // This file is part of rbOOmit.
       5             : 
       6             : // rbOOmit is free software; you can redistribute it and/or
       7             : // modify it under the terms of the GNU Lesser General Public
       8             : // License as published by the Free Software Foundation; either
       9             : // version 2.1 of the License, or (at your option) any later version.
      10             : 
      11             : // rbOOmit is distributed in the hope that it will be useful,
      12             : // but WITHOUT ANY WARRANTY; without even the implied warranty of
      13             : // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
      14             : // Lesser General Public License for more details.
      15             : 
      16             : // You should have received a copy of the GNU Lesser General Public
      17             : // License along with this library; if not, write to the Free Software
      18             : // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
      19             : 
      20             : // C++ includes
      21             : #include <algorithm>
      22             : #include <cstddef>
      23             : #include <ctime>
      24             : #include <cstdlib> // *must* precede <cmath> for proper std:abs() on PGI, Sun Studio CC
      25             : #include <cmath>
      26             : #include <iterator>
      27             : #include <memory>
      28             : #include <numeric>
      29             : 
      30             : // rbOOmit includes
      31             : #include "libmesh/rb_construction_base.h"
      32             : #include "libmesh/rb_parameters.h"
      33             : 
      34             : // libMesh includes
      35             : #include "libmesh/id_types.h"
      36             : #include "libmesh/libmesh_common.h"
      37             : #include "libmesh/numeric_vector.h"
      38             : #include "libmesh/equation_systems.h"
      39             : #include "libmesh/parallel.h"
      40             : #include "libmesh/condensed_eigen_system.h"
      41             : #include "libmesh/linear_implicit_system.h"
      42             : #include "libmesh/int_range.h"
      43             : #include "libmesh/utility.h"
      44             : 
      45             : // Nvidia C++ whining about destroying incomplete unique_ptr<T> Base::foo types
      46             : #include "libmesh/dof_map.h"
      47             : #include "libmesh/shell_matrix.h"
      48             : #include "libmesh/sparse_matrix.h"
      49             : #include "timpi/communicator.h"
      50             : 
      51             : // Anonymous namespace
      52             : namespace
      53             : {
      54             : 
      55             : /*
      56             :  * Helper function to divide a vector of samples across processors.
      57             :  * Returns a pair with num_local_samples and first_local_index.
      58             :  */
      59         277 : std::pair<unsigned int, unsigned int> calculate_n_local_samples_and_index(
      60             :     const libMesh::Parallel::Communicator &communicator,
      61             :     const unsigned int n_global_samples,
      62             :     const bool serial)
      63             : {
      64           8 :   unsigned int n_local_samples = n_global_samples;
      65           8 :   unsigned int first_local_index = 0;
      66             : 
      67         285 :   if (serial || communicator.size() == 1)
      68           9 :     return {n_local_samples, first_local_index};
      69             : 
      70             :   // Calculate the number of training parameters local to this processor
      71         276 :   unsigned int quotient  = n_global_samples/communicator.size();
      72         276 :   unsigned int remainder = n_global_samples%communicator.size();
      73         276 :   if (communicator.rank() < remainder)
      74             :     {
      75         128 :       n_local_samples = (quotient + 1);
      76         128 :       first_local_index = communicator.rank()*(quotient+1);
      77             :     }
      78             :   else
      79             :     {
      80           8 :       n_local_samples = quotient;
      81         148 :       first_local_index = communicator.rank()*quotient + remainder;
      82             :     }
      83         268 :     return {n_local_samples, first_local_index};
      84             : }
      85             : }  // end anonymous namespace
      86             : 
      87             : namespace libMesh
      88             : {
      89             : 
      90             : // ------------------------------------------------------------
      91             : // RBConstructionBase implementation
      92             : 
      93             : 
      94             : template <class Base>
      95         570 : RBConstructionBase<Base>::RBConstructionBase (EquationSystems & es,
      96             :                                               const std::string & name_in,
      97             :                                               const unsigned int number_in)
      98             :   : Base(es, name_in, number_in),
      99         538 :     quiet_mode(true),
     100         538 :     serial_training_set(false),
     101         538 :     _normalize_solution_snapshots(false),
     102         538 :     _training_parameters_initialized(false),
     103         538 :     _first_local_index(0),
     104         538 :     _n_local_training_samples(0),
     105         538 :     _n_global_training_samples(0),
     106         570 :     _training_parameters_random_seed(-1) // by default, use std::time to seed RNG
     107             : {
     108         570 : }
     109             : 
     110             : template <class Base>
     111         538 : RBConstructionBase<Base>::~RBConstructionBase () = default;
     112             : 
     113             : template <class Base>
     114           0 : void RBConstructionBase<Base>::clear ()
     115             : {
     116             :   // clear the parent data
     117           0 :   Base::clear();
     118           0 :   RBParametrized::clear();
     119           0 :   _training_parameters.clear();
     120           0 : }
     121             : 
     122             : template <class Base>
     123         570 : void RBConstructionBase<Base>::init_data ()
     124             : {
     125         570 :   Base::init_data();
     126             : 
     127             :   // Initialize the inner product storage vector, which is useful for
     128             :   // storing intermediate results when evaluating inner products
     129        1124 :   inner_product_storage_vector = NumericVector<Number>::build(this->comm());
     130         570 :   inner_product_storage_vector->init (this->n_dofs(), this->n_local_dofs(), false, PARALLEL);
     131         570 : }
     132             : 
     133             : template <class Base>
     134        4964 : void RBConstructionBase<Base>::get_global_max_error_pair(const Parallel::Communicator & communicator,
     135             :                                                          std::pair<numeric_index_type, Real> & error_pair)
     136             : {
     137             :   // Set error_pair.second to the maximum global value and also
     138             :   // find which processor contains the maximum value
     139             :   unsigned int proc_ID_index;
     140        4964 :   communicator.maxloc(error_pair.second, proc_ID_index);
     141             : 
     142             :   // Then broadcast error_pair.first from proc_ID_index
     143        4964 :   communicator.broadcast(error_pair.first, proc_ID_index);
     144        4964 : }
     145             : 
     146             : template <class Base>
     147           0 : void RBConstructionBase<Base>::set_normalize_solution_snapshots(bool value)
     148             : {
     149           0 :   _normalize_solution_snapshots = value;
     150           0 : }
     151             : 
     152             : template <class Base>
     153         285 : numeric_index_type RBConstructionBase<Base>::get_n_training_samples() const
     154             : {
     155         285 :   libmesh_error_msg_if(!_training_parameters_initialized,
     156             :                        "Error: training parameters must first be initialized.");
     157             : 
     158             :   // First we check if there are no parameters here, and in that case we
     159             :   // return 1 since a single training sample is sufficient to generate an
     160             :   // RB approximation if there are no parameters. Note that in parallel,
     161             :   // and when we don't have a serial training set, set return comm().size()
     162             :   // so that each processor is assigned a single (empty) training sample.
     163         285 :   if (_training_parameters.empty())
     164             :     {
     165           0 :       if (serial_training_set)
     166           0 :         return 1;
     167             :       else
     168           0 :         return this->comm().size();
     169             :     }
     170             : 
     171         285 :   return _n_global_training_samples;
     172             : }
     173             : 
     174             : template <class Base>
     175      417321 : numeric_index_type RBConstructionBase<Base>::get_local_n_training_samples() const
     176             : {
     177      417321 :   libmesh_error_msg_if(!_training_parameters_initialized,
     178             :                        "Error: training parameters must first be initialized.");
     179             : 
     180             :   // First we check if there are no parameters here, and in that case we
     181             :   // return 1 for both serial and parallel training sets. This is consistent
     182             :   // with get_n_training_samples(), and avoids accessing
     183             :   // training_parameters.begin() when training_parameters is empty.
     184      417321 :   if (_training_parameters.empty())
     185           0 :     return 1;
     186             : 
     187      417321 :   return _n_local_training_samples;
     188             : }
     189             : 
     190             : template <class Base>
     191      826396 : numeric_index_type RBConstructionBase<Base>::get_first_local_training_index() const
     192             : {
     193      826396 :   libmesh_error_msg_if(!_training_parameters_initialized,
     194             :                        "Error: training parameters must first be initialized.");
     195             : 
     196             :   // First we check if there are no parameters here, and in that case we
     197             :   // return 0 for a serial training set and comm().rank() for a parallel
     198             :   // training set. This is consistent with get_n_training_samples(), and
     199             :   // avoids accessing training_parameters.begin() when training_parameters
     200             :   // is empty.
     201      826396 :   if (_training_parameters.empty())
     202             :     {
     203           0 :       if (serial_training_set)
     204           0 :         return 0;
     205             :       else
     206           0 :         return this->comm().rank();
     207             :     }
     208             : 
     209      826396 :   return _first_local_index;
     210             : }
     211             : 
     212             : template <class Base>
     213      411160 : numeric_index_type RBConstructionBase<Base>::get_last_local_training_index() const
     214             : {
     215      411160 :   libmesh_error_msg_if(!_training_parameters_initialized,
     216             :                        "Error: training parameters must first be initialized.");
     217             : 
     218      411160 :   if (_training_parameters.empty())
     219           0 :     return 0;
     220             : 
     221      411160 :   return _first_local_index + _n_local_training_samples;
     222             : }
     223             : 
     224             : template <class Base>
     225      408234 : void RBConstructionBase<Base>::set_params_from_training_set(unsigned int global_index)
     226             : {
     227      408234 :   set_parameters(get_params_from_training_set(global_index));
     228      408234 : }
     229             : 
     230             : template <class Base>
     231      408234 : RBParameters RBConstructionBase<Base>::get_params_from_training_set(unsigned int global_index)
     232             : {
     233      408234 :   libmesh_error_msg_if(!_training_parameters_initialized,
     234             :                        "Error: training parameters must first be initialized.");
     235             : 
     236             :   // If the _training_parameters are empty, return an empty RBParameters.
     237             :   // Otherwise, create a new RBParameters object from the single sample requested.
     238      408234 :   RBParameters params;
     239      408234 :   if (!_training_parameters.empty())
     240             :     {
     241      408234 :       libmesh_error_msg_if((global_index < this->get_first_local_training_index()) ||
     242             :                            (global_index >= this->get_last_local_training_index()),
     243             :                            "Error: index "
     244             :                            << global_index
     245             :                            << " must be within range: "
     246             :                            << this->get_first_local_training_index()
     247             :                            << " - "
     248             :                            << this->get_last_local_training_index());
     249             : 
     250      408234 :       const numeric_index_type local_index = global_index - get_first_local_training_index();
     251     3027916 :       for (const auto & [param_name, sample_vector] : _training_parameters)
     252     2837972 :         params.set_value(param_name, sample_vector[local_index]);
     253             : 
     254             :       // Copy all extra values into the new RBParameters.
     255             :       // We assume that the samples may be indexed differently for extra parameters,
     256             :       // so we don't just copy the local_index value.
     257      408234 :       const auto & mine = get_parameters();
     258      408234 :       for (const auto & [key, extra_sample_vector] :
     259      408234 :            as_range(mine.extra_begin(), mine.extra_end()))
     260             :         {
     261           0 :           for (const auto idx : index_range(extra_sample_vector))
     262           0 :             params.set_extra_value(key, idx, extra_sample_vector[idx]);
     263             :         }
     264             :     }
     265             : 
     266      408234 :   return params;
     267           0 : }
     268             : 
     269             : template <class Base>
     270           7 : void RBConstructionBase<Base>::set_params_from_training_set_and_broadcast(unsigned int global_index)
     271             : {
     272           7 :   libmesh_error_msg_if(!_training_parameters_initialized,
     273             :                        "Error: training parameters must first be initialized.");
     274             : 
     275           7 :   processor_id_type root_id = 0;
     276          14 :   if ((this->get_first_local_training_index() <= global_index) &&
     277           7 :       (global_index < this->get_last_local_training_index()))
     278             :     {
     279             :       // Set parameters on only one processor
     280           7 :       set_params_from_training_set(global_index);
     281             : 
     282             :       // set root_id, only non-zero on one processor
     283           7 :       root_id = this->processor_id();
     284             :     }
     285             : 
     286             :   // broadcast
     287           7 :   this->comm().max(root_id);
     288           7 :   broadcast_parameters(root_id);
     289           7 : }
     290             : 
     291             : template <class Base>
     292         285 : void RBConstructionBase<Base>::initialize_training_parameters(const RBParameters & mu_min,
     293             :                                                               const RBParameters & mu_max,
     294             :                                                               const unsigned int n_global_training_samples,
     295             :                                                               const std::map<std::string,bool> & log_param_scale,
     296             :                                                               const bool deterministic)
     297             : {
     298         285 :   if (!is_quiet())
     299             :     {
     300             :       // Print out some info about the training set initialization
     301           0 :       libMesh::out << "Initializing training parameters with "
     302           0 :                   << (deterministic ? "deterministic " : "random " )
     303           0 :                   << "training set..." << std::endl;
     304             : 
     305           0 :       for (const auto & pr : log_param_scale)
     306           0 :         libMesh::out << "Parameter "
     307           0 :                      << pr.first
     308           0 :                      << ": log scaling = "
     309           0 :                      << pr.second
     310           0 :                      << std::endl;
     311             : 
     312           0 :       libMesh::out << std::endl;
     313             :     }
     314             : 
     315         285 :   if (deterministic)
     316             :     {
     317           4 :       const auto [first_local_index, last_local_index] =
     318         149 :         generate_training_parameters_deterministic(this->comm(),
     319             :                                                    log_param_scale,
     320         141 :                                                    _training_parameters,
     321             :                                                    n_global_training_samples,
     322             :                                                    mu_min,
     323             :                                                    mu_max,
     324         141 :                                                    serial_training_set);
     325         141 :       _first_local_index = first_local_index;
     326         141 :       _n_local_training_samples = last_local_index-first_local_index;
     327             :     }
     328             :   else
     329             :     {
     330             :       // Generate random training samples for all parameters
     331           4 :       const auto [first_local_index, last_local_index] =
     332         152 :         generate_training_parameters_random(this->comm(),
     333             :                                             log_param_scale,
     334         144 :                                             _training_parameters,
     335             :                                             n_global_training_samples,
     336             :                                             mu_min,
     337             :                                             mu_max,
     338             :                                             this->_training_parameters_random_seed,
     339         144 :                                             serial_training_set);
     340         144 :       _first_local_index = first_local_index;
     341         144 :       _n_local_training_samples = last_local_index-first_local_index;
     342             :     }
     343         285 :   _n_global_training_samples = _n_local_training_samples;
     344             : 
     345         285 :   if (!serial_training_set)
     346         285 :     this->comm().sum(_n_global_training_samples);
     347             : 
     348             :   // For each parameter that only allows discrete values, we "snap" to the nearest
     349             :   // allowable discrete value
     350         285 :   if (get_n_discrete_params() > 0)
     351             :     {
     352           0 :       for (auto & [param_name, sample_vector] : _training_parameters)
     353             :         {
     354           0 :           if (is_discrete_parameter(param_name))
     355             :             {
     356             :               const std::vector<Real> & discrete_values =
     357           0 :                 libmesh_map_find(get_discrete_parameter_values(), param_name);
     358             : 
     359           0 :               for (const auto sample_idx : index_range(sample_vector))
     360             :                 {
     361             :                   // Round all values to the closest discrete value.
     362           0 :                   std::vector<Real> discretized_vector(sample_vector[sample_idx].size());
     363           0 :                   std::transform(sample_vector[sample_idx].cbegin(),
     364           0 :                                  sample_vector[sample_idx].cend(),
     365             :                                  discretized_vector.begin(),
     366           0 :                                  [&discrete_values](const Real & val) {
     367           0 :                                    return get_closest_value(val, discrete_values);
     368             :                                  });
     369           0 :                   sample_vector[sample_idx] = discretized_vector;
     370             :                 }
     371             :             }
     372             :         }
     373             :     }
     374             : 
     375         285 :   _training_parameters_initialized = true;
     376         285 : }
     377             : 
     378             : template <class Base>
     379           0 : void RBConstructionBase<Base>::load_training_set(const std::map<std::string, std::vector<RBParameter>> & new_training_set)
     380             : {
     381             :   // Make sure we're running this on all processors at the same time
     382           0 :   libmesh_parallel_only(this->comm());
     383             : 
     384             :   // First, make sure that an initial training set has already been generated
     385           0 :   libmesh_error_msg_if(!_training_parameters_initialized,
     386             :                        "Error: load_training_set cannot be used to initialize parameters");
     387             : 
     388             :   // Make sure that the training set has the correct number of parameters
     389           0 :   const unsigned int n_params = get_n_params();
     390           0 :   libmesh_error_msg_if(new_training_set.size() > n_params,
     391             :                        "Error: new_training_set should not have more than get_n_params() parameters.");
     392             : 
     393             :   // Check that (new_training_set.size() == get_n_params()) is the same on all processes so that
     394             :   // we go into the same branch of the "if" statement below on all processes.
     395           0 :   const bool size_matches = (new_training_set.size() == n_params);
     396           0 :   libmesh_assert(this->comm().verify(size_matches));
     397             : 
     398           0 :   if (size_matches)
     399             :     {
     400             :       // If new_training_set stores values for all parameters, then we overwrite
     401             :       // _training_parameters with new_training_set.
     402             : 
     403             :       // Get the number of local and global training parameters
     404           0 :       _first_local_index = 0;
     405           0 :       _n_local_training_samples =
     406           0 :         cast_int<numeric_index_type>(new_training_set.begin()->second.size());
     407           0 :       _n_global_training_samples = _n_local_training_samples;
     408             : 
     409           0 :       if (!serial_training_set)
     410             :         {
     411           0 :           this->comm().sum(_n_global_training_samples);
     412             : 
     413             :           // Set the first/last indices.
     414           0 :           std::vector<numeric_index_type> local_sizes (this->n_processors(), 0);
     415           0 :           local_sizes[this->processor_id()] = _n_local_training_samples;
     416           0 :           this->comm().sum(local_sizes);
     417             : 
     418             :           // first_local_index is the sum of local_sizes
     419             :           // for all processor ids less than ours
     420           0 :           for (auto p : make_range(this->processor_id()))
     421           0 :             _first_local_index += local_sizes[p];
     422             :         }
     423             : 
     424             :       // Ensure that the parameters are the same.
     425           0 :       for (const auto & pr : _training_parameters)
     426           0 :         libmesh_error_msg_if(!new_training_set.count(pr.first),
     427             :                              "Parameters must be identical in order to overwrite dataset.");
     428             : 
     429             :       // Copy the values from the new_training_set to the internal training_parameters.
     430           0 :       _training_parameters = new_training_set;
     431             :     }
     432             :   else
     433             :     {
     434             :       // If new_training_set stores values for a subset of the parameters, then we keep the
     435             :       // length of training_parameters unchanged and overwrite the entries of the specified
     436             :       // parameters from new_training_set. Note that we repeatedly loop over new_training_set
     437             :       // to fill up the entire length of the sample_vector.
     438           0 :       for (auto & [param_name, sample_vector]: _training_parameters)
     439             :         {
     440           0 :           if (new_training_set.count(param_name))
     441             :             {
     442           0 :               for (const auto i : make_range(get_local_n_training_samples()))
     443             :                 {
     444           0 :                   const unsigned int num_new_samples = libmesh_map_find(new_training_set,param_name).size();
     445           0 :                   libmesh_error_msg_if (num_new_samples==0, "new_training_set set should not be empty");
     446             : 
     447           0 :                   const unsigned int new_training_set_index = i % num_new_samples;
     448           0 :                   sample_vector[i] = libmesh_map_find(new_training_set,param_name)[new_training_set_index];
     449             :                 }
     450             :             }
     451             :         }
     452             :     }
     453           0 : }
     454             : 
     455             : template <class Base>
     456           0 : void RBConstructionBase<Base>::set_training_parameter_values(
     457             :   const std::string & param_name, const std::vector<RBParameter> & values)
     458             : {
     459           0 :   libmesh_error_msg_if(!_training_parameters_initialized,
     460             :     "Training parameters must be initialized before calling set_training_parameter_values");
     461           0 :   libmesh_error_msg_if(values.size() != get_local_n_training_samples(),
     462             :     "Inconsistent sizes");
     463             : 
     464             :   // Copy the new data, overwriting the old data.
     465           0 :   auto & training_vector = libmesh_map_find(_training_parameters, param_name);
     466           0 :   training_vector = values;
     467           0 : }
     468             : 
     469             : 
     470             : template <class Base>
     471             : std::pair<std::size_t, std::size_t>
     472         144 : RBConstructionBase<Base>::generate_training_parameters_random(const Parallel::Communicator & communicator,
     473             :                                                               const std::map<std::string, bool> & log_param_scale,
     474             :                                                               std::map<std::string, std::vector<RBParameter>> & local_training_parameters_in,
     475             :                                                               const unsigned int n_global_training_samples_in,
     476             :                                                               const RBParameters & min_parameters,
     477             :                                                               const RBParameters & max_parameters,
     478             :                                                               const int training_parameters_random_seed,
     479             :                                                               const bool serial_training_set)
     480             : {
     481         144 :   const unsigned int num_params = min_parameters.n_parameters();
     482         144 :   libmesh_error_msg_if(num_params!=max_parameters.n_parameters(),
     483             :     "Number of parameters must be identical for min/max.");
     484             : 
     485             :   // Clear training_parameters_in
     486           4 :   local_training_parameters_in.clear();
     487             : 
     488         144 :   if (num_params == 0)
     489           0 :     return {0,0};
     490             : 
     491         144 :   if (training_parameters_random_seed < 0)
     492             :     {
     493         142 :       if (!serial_training_set)
     494             :         {
     495             :           // seed the random number generator with the system time
     496             :           // and the processor ID so that the seed is different
     497             :           // on different processors
     498         142 :           std::srand( static_cast<unsigned>( std::time(0)*(1+communicator.rank()) ));
     499             :         }
     500             :       else
     501             :         {
     502             :           // seed the random number generator with the system time
     503             :           // only so that the seed is the same on all processors
     504             :           //
     505             :           // Note that we broadcast the time on processor 0 to make
     506             :           // sure all processors agree.
     507           0 :           unsigned int current_time = static_cast<unsigned>( std::time(0) );
     508           0 :           communicator.broadcast(current_time, 0);
     509           0 :           std::srand(current_time);
     510             :         }
     511             :     }
     512             :   else
     513             :     {
     514           2 :       if (!serial_training_set)
     515             :         {
     516             :           // seed the random number generator with the provided value
     517             :           // and the processor ID so that the seed is different
     518             :           // on different processors
     519           2 :           std::srand( static_cast<unsigned>( training_parameters_random_seed*(1+communicator.rank()) ));
     520             :         }
     521             :       else
     522             :         {
     523             :           // seed the random number generator with the provided value
     524             :           // so that the seed is the same on all processors
     525           0 :           std::srand( static_cast<unsigned>( training_parameters_random_seed ));
     526             :         }
     527             :     }
     528             : 
     529             :   // TODO - we don't support vector-data here yet. This would only apply in the case where
     530             :   //        min or max are vector-valued, and all the generated points need to stay within those ranges.
     531             :   //        But typically we expect that if we're calling this function, we only have 1 min and 1 max,
     532             :   //        so the generated values are single-valued as well. The .get_value() calls will throw an error
     533             :   //        if this is not the case.
     534             : 
     535             :   // initialize training_parameters_in
     536           4 :   const auto & [n_local_training_samples, first_local_index] =
     537         140 :       calculate_n_local_samples_and_index(communicator, n_global_training_samples_in,
     538             :                                           serial_training_set);
     539        1144 :   for (const auto & pr : min_parameters)
     540        1972 :     local_training_parameters_in[pr.first] = std::vector<RBParameter>(n_local_training_samples);
     541             : 
     542             :   // finally, set the values
     543        1144 :   for (auto & [param_name, sample_vector] : local_training_parameters_in)
     544             :     {
     545      169600 :       for (auto i : make_range(n_local_training_samples))
     546             :         {
     547      168600 :           Real random_number = static_cast<Real>(std::rand()) / RAND_MAX; // in range [0,1]
     548             : 
     549             :           // Generate log10 scaled training parameters
     550      168600 :           if (libmesh_map_find(log_param_scale, param_name))
     551             :             {
     552           0 :               Real log_min   = std::log10(min_parameters.get_value(param_name));
     553           0 :               Real log_range = std::log10(max_parameters.get_value(param_name) / min_parameters.get_value(param_name));
     554             : 
     555           0 :               sample_vector[i] = {std::pow(Real(10.), log_min + random_number*log_range )};
     556             :             }
     557             :           // Generate linearly scaled training parameters
     558             :           else
     559             :             {
     560      168600 :               sample_vector[i] = {
     561      168600 :                   random_number * (max_parameters.get_value(param_name) -
     562      309200 :                                    min_parameters.get_value(param_name)) +
     563      140600 :                   min_parameters.get_value(param_name)};
     564             :             }
     565             :         }
     566             :     }
     567         148 :   return {first_local_index, first_local_index+n_local_training_samples};
     568             : }
     569             : 
     570             : template <class Base>
     571             : std::pair<std::size_t, std::size_t>
     572         141 : RBConstructionBase<Base>::generate_training_parameters_deterministic(const Parallel::Communicator & communicator,
     573             :                                                                      const std::map<std::string, bool> & log_param_scale,
     574             :                                                                      std::map<std::string, std::vector<RBParameter>> & local_training_parameters_in,
     575             :                                                                      const unsigned int n_global_training_samples_in,
     576             :                                                                      const RBParameters & min_parameters,
     577             :                                                                      const RBParameters & max_parameters,
     578             :                                                                      const bool serial_training_set)
     579             : {
     580           4 :   libmesh_assert_equal_to ( min_parameters.n_parameters(), max_parameters.n_parameters() );
     581         141 :   const unsigned int num_params = min_parameters.n_parameters();
     582             : 
     583         141 :   if (num_params == 0)
     584           0 :     return {0,0};
     585             : 
     586         141 :   if (num_params > 3)
     587           0 :     libmesh_not_implemented_msg("ERROR: Deterministic training sample generation "
     588             :                                 "not implemented for more than three parameters.");
     589             : 
     590             :   // TODO - we don't support vector-data here yet. This would only apply in the case where
     591             :   //        min or max are vector-valued, and all the generated points need to stay within those ranges.
     592             :   //        But typically we expect that if we're calling this function, we only have 1 min and 1 max,
     593             :   //        so the generated values are single-valued as well. The .get_value() calls will throw an error
     594             :   //        if this is not the case.
     595             : 
     596             :   // Reinitialize training_parameters_in (but don't remove existing keys!)
     597           4 :   const auto &[n_local_training_samples, first_local_index] =
     598         137 :       calculate_n_local_samples_and_index(communicator, n_global_training_samples_in,
     599             :                                          serial_training_set);
     600         141 :   const auto last_local_index = first_local_index + n_local_training_samples;
     601         423 :   for (const auto & pr : min_parameters)
     602         556 :     local_training_parameters_in[pr.first] = std::vector<RBParameter>(n_local_training_samples);
     603             : 
     604             :   // n_training_samples_per_param has 3 entries, but entries after "num_params"
     605             :   // are unused so we just set their value to 1. We need to set it to 1 (rather
     606             :   // than 0) so that we don't skip the inner part of the triply-nested loop over
     607             :   // n_training_samples_per_param below.
     608         145 :   std::vector<unsigned int> n_training_samples_per_param(3);
     609         564 :   for (unsigned int param=0; param<3; param++)
     610             :     {
     611         423 :       if (param < num_params)
     612             :         {
     613         282 :           n_training_samples_per_param[param] =
     614         282 :             static_cast<unsigned int>( std::round(std::pow(static_cast<Real>(n_global_training_samples_in), 1./num_params)) );
     615             :         }
     616             :       else
     617             :         {
     618         145 :           n_training_samples_per_param[param] = 1;
     619             :         }
     620             :     }
     621             : 
     622             :   {
     623             :     // The current implementation assumes that we have the same number of
     624             :     // samples in each parameter, so we check that n_training_samples_in
     625             :     // is consistent with this assumption.
     626           4 :     unsigned int total_samples_check = 1;
     627         564 :     for (unsigned int n_samples : n_training_samples_per_param)
     628             :       {
     629         423 :         total_samples_check *= n_samples;
     630             :       }
     631             : 
     632         141 :     libmesh_error_msg_if(total_samples_check != n_global_training_samples_in,
     633             :                          "Error: Number of training samples = "
     634             :                          << n_global_training_samples_in
     635             :                          << " does not enable a uniform grid of samples with "
     636             :                          << num_params << " parameters. Try "
     637             :                          << total_samples_check << " samples instead?");
     638             :   }
     639             : 
     640             :   // First we make a list of training samples associated with each parameter,
     641             :   // then we take a tensor product to obtain the final set of training samples.
     642         141 :   std::vector<std::vector<Real>> training_samples_per_param(num_params);
     643             :   {
     644           4 :     unsigned int i = 0;
     645         423 :     for (const auto & pr : min_parameters)
     646             :       {
     647         282 :         const std::string & param_name = pr.first;
     648         282 :         const bool use_log_scaling = libmesh_map_find(log_param_scale, param_name);
     649         282 :         Real min_param = min_parameters.get_value(param_name);
     650         282 :         Real max_param = max_parameters.get_value(param_name);
     651             : 
     652         298 :         training_samples_per_param[i].resize(n_training_samples_per_param[i]);
     653             : 
     654        3110 :         for (unsigned int j=0; j<n_training_samples_per_param[i]; j++)
     655             :           {
     656             :             // Generate log10 scaled training parameters
     657        2820 :             if (use_log_scaling)
     658             :               {
     659           0 :                 Real epsilon = 1.e-6; // Prevent rounding errors triggering asserts
     660           0 :                 Real log_min   = std::log10(min_param + epsilon);
     661           0 :                 Real log_range = std::log10( (max_param-epsilon) / (min_param+epsilon) );
     662           0 :                 Real step_size = log_range /
     663           0 :                   std::max((unsigned int)1,(n_training_samples_per_param[i]-1));
     664             : 
     665           0 :                 if (j<(n_training_samples_per_param[i]-1))
     666             :                   {
     667           0 :                     training_samples_per_param[i][j] = std::pow(10., log_min + j*step_size );
     668             :                   }
     669             :                 else
     670             :                   {
     671             :                     // due to rounding error, the last parameter can be slightly
     672             :                     // bigger than max_parameters, hence snap back to the max
     673           0 :                     training_samples_per_param[i][j] = max_param;
     674             :                   }
     675             :               }
     676             :             else
     677             :               {
     678             :                 // Generate linearly scaled training parameters
     679        5640 :                 Real step_size = (max_param - min_param) /
     680        2820 :                   std::max((unsigned int)1,(n_training_samples_per_param[i]-1));
     681        2980 :                 training_samples_per_param[i][j] = j*step_size + min_param;
     682             :               }
     683             : 
     684             :           }
     685         282 :         i++;
     686             :       }
     687             :   }
     688             : 
     689             :   // Now load into training_samples_in
     690             :   {
     691         145 :     std::vector<unsigned int> indices(3);
     692           4 :     unsigned int index_count = 0;
     693        1551 :     for (indices[0]=0; indices[0]<n_training_samples_per_param[0]; indices[0]++)
     694             :       {
     695       15510 :         for (indices[1]=0; indices[1]<n_training_samples_per_param[1]; indices[1]++)
     696             :           {
     697       28200 :             for (indices[2]=0; indices[2]<n_training_samples_per_param[2]; indices[2]++)
     698             :               {
     699         400 :                 unsigned int param_count = 0;
     700       42300 :                 for (const auto & pr : min_parameters)
     701             :                   {
     702             :                     std::vector<RBParameter> & training_vector =
     703       28200 :                       libmesh_map_find(local_training_parameters_in, pr.first);
     704       28200 :                     if (first_local_index <= index_count && index_count < last_local_index)
     705        5800 :                       training_vector[index_count - first_local_index] =
     706             :                         {training_samples_per_param[param_count][indices[param_count]]};
     707             : 
     708       28200 :                     param_count++;
     709             :                   }
     710       14100 :                 index_count++;
     711             :               }
     712             :           }
     713             :       }
     714             :   }
     715         141 :   return {first_local_index, first_local_index+n_local_training_samples};
     716         133 : }
     717             : 
     718             : 
     719             : template <class Base>
     720        4964 : void RBConstructionBase<Base>::broadcast_parameters(const unsigned int proc_id)
     721             : {
     722         140 :   libmesh_assert_less (proc_id, this->n_processors());
     723             : 
     724             :   // create a copy of the current parameters
     725        5244 :   RBParameters current_parameters = get_parameters();
     726        4964 :   libmesh_error_msg_if(current_parameters.n_samples()!=1,
     727             :       "Only single-sample RBParameter objects can be broadcast.");
     728             : 
     729             :   // Serialize the current_parameters to current_parameters_vector in order to broadcast.
     730             :   // We handle multiple samples and vector values.
     731             :   // However, the vector values are assumed to remain the same size across samples.
     732        4964 :   const std::size_t nparams = current_parameters.n_parameters();
     733        4964 :   const std::size_t nsamples = current_parameters.n_samples();
     734             : 
     735             :   // First we get the sizes of all the parameter value vectors.
     736         280 :   std::vector<std::size_t> param_value_sizes;
     737        4964 :   param_value_sizes.reserve(nparams);
     738       25556 :   for (const auto & pr : current_parameters)
     739       21172 :     param_value_sizes.push_back(pr.second[0].size());
     740             : 
     741             :   // Broadcast the sizes vector and reserve memory.
     742        4964 :   this->comm().broadcast(param_value_sizes, proc_id);
     743         280 :   std::size_t buffsize = std::accumulate(param_value_sizes.cbegin(), param_value_sizes.cend(), 0ul);
     744         280 :   std::vector<Real> serialized_parameters;
     745        4964 :   serialized_parameters.reserve(buffsize);
     746             : 
     747             :   // Then we serialize the parameters/sample/value vectors into a single vector.
     748       25556 :   for (const auto & pr : current_parameters)
     749             :     {
     750       41184 :       for (const auto sample_idx : make_range(nsamples))
     751       21752 :         serialized_parameters.insert(serialized_parameters.end(),
     752         580 :                                      pr.second[sample_idx].cbegin(),
     753        1160 :                                      pr.second[sample_idx].cend());
     754             :     }
     755             : 
     756             :   // Do the broadcasts.
     757        4964 :   this->comm().broadcast(serialized_parameters, proc_id);
     758             : 
     759             :   // Deserialize into the copy of the RBParameters object.
     760         140 :   std::size_t param_idx = 0;
     761         280 :   auto val_idx = serialized_parameters.cbegin();
     762       25556 :   for (const auto & pr : current_parameters)
     763             :     {
     764       21172 :       const std::size_t param_value_size = param_value_sizes[param_idx];
     765       41184 :       for (const auto sample_idx: make_range(nsamples))
     766             :         {
     767       20592 :           auto end_val_idx = std::next(val_idx,param_value_size);
     768       20592 :           RBParameter sample_val(val_idx, end_val_idx);
     769       20592 :           current_parameters.set_value(pr.first, sample_idx, sample_val);
     770         580 :           val_idx = end_val_idx;
     771             :         }
     772       20592 :       ++param_idx;
     773             :     }
     774             : 
     775             :   // Overwrite the parameters globally.
     776        4964 :   set_parameters(current_parameters);
     777        4964 : }
     778             : 
     779             : template <class Base>
     780         285 : void RBConstructionBase<Base>::set_training_random_seed(int seed)
     781             : {
     782         285 :   this->_training_parameters_random_seed = seed;
     783         285 : }
     784             : 
     785             : // Template specializations
     786             : 
     787             : // EigenSystem is only defined if we have SLEPc
     788             : #if defined(LIBMESH_HAVE_SLEPC)
     789             : template class LIBMESH_EXPORT RBConstructionBase<CondensedEigenSystem>;
     790             : #endif
     791             : 
     792             : template class LIBMESH_EXPORT RBConstructionBase<LinearImplicitSystem>;
     793             : template class LIBMESH_EXPORT RBConstructionBase<System>;
     794             : 
     795             : } // namespace libMesh

Generated by: LCOV version 1.14