#include "libmesh/rb_construction_base.h"

// C++ includes
#include <ctime>
#include <cstdlib>
#include <cmath>

#include "libmesh/libmesh_logging.h"
#include "libmesh/numeric_vector.h"
#include "libmesh/equation_systems.h"
#include "libmesh/parallel.h"
#include "libmesh/petsc_linear_solver.h"
#include "libmesh/condensed_eigen_system.h"
#include "libmesh/linear_implicit_system.h"
#include "libmesh/int_range.h"

template <class Base>
RBConstructionBase<Base>::RBConstructionBase (EquationSystems & es,
                                              const std::string & name_in,
                                              const unsigned int number_in)
  : Base(es, name_in, number_in),
    serial_training_set(false),
    training_parameters_initialized(false),
    training_parameters_random_seed(-1) // a negative seed means "seed the RNG from std::time"
{
  training_parameters.clear();
}
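// Note: training_parameters maps each parameter name to a NumericVector<Number>
// holding that parameter's value at every training sample; the vectors are filled
// by initialize_training_parameters() or load_training_set() below.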
 
template <class Base>
void RBConstructionBase<Base>::clear()
{
  RBParametrized::clear();
  training_parameters.clear();
}
 
  inner_product_storage_vector->init (this->n_dofs(), this->n_local_dofs(), false, PARALLEL);
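// The work vector above is used when evaluating inner products; it is laid out like
// the system solution: global size n_dofs(), local size n_local_dofs(), zeroed
// (fast=false), PARALLEL storage.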
 
template <class Base>
void RBConstructionBase<Base>::get_global_max_error_pair(const Parallel::Communicator & communicator,
                                                         std::pair<numeric_index_type, Real> & error_pair)
{
  unsigned int proc_ID_index;
  communicator.maxloc(error_pair.second, proc_ID_index);

  communicator.broadcast(error_pair.first, proc_ID_index);
}
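// maxloc() finds the global maximum of error_pair.second together with the rank that
// owns it; the follow-up broadcast() makes that rank's error_pair.first (the index of
// the maximizing entry) known on every processor.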
 
template <class Base>
numeric_index_type RBConstructionBase<Base>::get_n_training_samples() const
{
  if (training_parameters.empty())
    return 0;

  return training_parameters.begin()->second->size();
}

template <class Base>
numeric_index_type RBConstructionBase<Base>::get_local_n_training_samples() const
{
  return training_parameters.begin()->second->local_size();
}

template <class Base>
numeric_index_type RBConstructionBase<Base>::get_first_local_training_index() const
{
  return training_parameters.begin()->second->first_local_index();
}

template <class Base>
numeric_index_type RBConstructionBase<Base>::get_last_local_training_index() const
{
  return training_parameters.begin()->second->last_local_index();
}
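// All of the training vectors share the same parallel layout, so the size and
// index-range queries above only need to inspect the first entry of the map.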
 
template <class Base>
void RBConstructionBase<Base>::set_params_from_training_set(unsigned int index)
{
  set_parameters(get_params_from_training_set(index));
}
 
template <class Base>
RBParameters RBConstructionBase<Base>::get_params_from_training_set(unsigned int index)
{
  libmesh_assert( (this->get_first_local_training_index() <= index) &&
                  (index < this->get_last_local_training_index()) );

  RBParameters params;
  for (const auto & pr : training_parameters)
    {
      const std::string & param_name = pr.first;
      Real param_value = libmesh_real((*pr.second)(index));
      params.set_value(param_name, param_value);
    }

  // Carry over any "extra" parameters that are not part of the training set
  const auto & mine = get_parameters();
  for (const auto & pr : as_range(mine.extra_begin(), mine.extra_end()))
    params.set_extra_value(pr.first, pr.second);

  return params;
}
 
template <class Base>
void RBConstructionBase<Base>::set_params_from_training_set_and_broadcast(unsigned int index)
{
  processor_id_type root_id = 0;
  if ((this->get_first_local_training_index() <= index) &&
      (index < this->get_last_local_training_index()))
    {
      set_params_from_training_set(index);
      root_id = this->processor_id();
    }

  this->comm().max(root_id);
  broadcast_parameters(root_id);
}
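// Only the processor whose local index range contains "index" reads the sample and
// records its rank in root_id; root_id stays zero elsewhere, so the max() reduction
// identifies the owner and broadcast_parameters() then ships the result to all ranks.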
 
template <class Base>
void RBConstructionBase<Base>::initialize_training_parameters(const RBParameters & mu_min,
                                                              const RBParameters & mu_max,
                                                              unsigned int n_training_samples,
                                                              std::map<std::string,bool> log_param_scale,
                                                              bool deterministic)
{
  libMesh::out << "Initializing training parameters with "
               << (deterministic ? "deterministic " : "random " )
               << "training set..." << std::endl;

  for (const auto & pr : log_param_scale)
    libMesh::out << pr.first
                 << ": log scaling = "
                 << pr.second << std::endl;

  if (deterministic)
    {
      generate_training_parameters_deterministic(this->comm(),
                                                 log_param_scale,
                                                 training_parameters,
                                                 n_training_samples,
                                                 mu_min,
                                                 mu_max,
                                                 serial_training_set);
    }
  else
    {
      generate_training_parameters_random(this->comm(),
                                          log_param_scale,
                                          training_parameters,
                                          n_training_samples,
                                          mu_min,
                                          mu_max,
                                          this->training_parameters_random_seed,
                                          serial_training_set);
    }

  // If any parameters are restricted to discrete values, snap each generated
  // sample to the closest admissible discrete value.
  if (get_n_discrete_params() > 0)
    {
      for (const auto & pr : training_parameters)
        {
          const std::string & param_name = pr.first;
          if (is_discrete_parameter(param_name))
            {
              std::vector<Real> discrete_values =
                get_discrete_parameter_values().find(param_name)->second;

              NumericVector<Number> * training_vector = pr.second.get();
              for (numeric_index_type index = training_vector->first_local_index();
                   index < training_vector->last_local_index(); index++)
                {
                  Real value = libmesh_real((*training_vector)(index));
                  Real nearest_discrete_value = get_closest_value(value, discrete_values);
                  training_vector->set(index, nearest_discrete_value);
                }
            }
        }
    }

  training_parameters_initialized = true;
}
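// A sketch of a typical application-side call (rb_system, mu_min, mu_max and the
// parameter names below are hypothetical; see rb_construction_base.h for the
// exact declaration):
//
//   std::map<std::string, bool> log_scale = {{"mu_0", true}, {"mu_1", false}};
//   rb_system.initialize_training_parameters(mu_min, mu_max,
//                                            /*n_training_samples=*/ 100,
//                                            log_scale,
//                                            /*deterministic=*/ true);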
 
template <class Base>
void RBConstructionBase<Base>::load_training_set(std::map<std::string, std::vector<Number>> & new_training_set)
{
  // The training vectors must already exist; load_training_set only overwrites their values
  if (!training_parameters_initialized)
    libmesh_error_msg("Error: load_training_set cannot be used to initialize parameters");

  // Make sure an entry is supplied for every parameter
  if (new_training_set.size() != get_n_params())
    libmesh_error_msg("Error: Incorrect number of parameters in load_training_set.");

  // Delete the old training vectors
  for (auto & pr : training_parameters)
    pr.second.reset(nullptr);

  // Each processor supplies its own local samples
  numeric_index_type n_local_training_samples  =
    cast_int<numeric_index_type>(new_training_set.begin()->second.size());
  numeric_index_type n_global_training_samples = n_local_training_samples;
  this->comm().sum(n_global_training_samples);

  for (auto & pr : training_parameters)
    {
      pr.second = NumericVector<Number>::build(this->comm());
      pr.second->init(n_global_training_samples, n_local_training_samples, false, PARALLEL);
    }

  for (auto & pr : training_parameters)
    {
      const std::string & param_name = pr.first;
      NumericVector<Number> * training_vector = pr.second.get();

      numeric_index_type first_index = training_vector->first_local_index();
      for (numeric_index_type i=0; i<n_local_training_samples; i++)
        {
          numeric_index_type index = first_index + i;
          training_vector->set(index, new_training_set[param_name][i]);
        }
    }
}
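// Each processor supplies its own chunk of the training set: the global sample count
// is the sum of the local counts, and every rank writes its values into the local
// index range [first_local_index, first_local_index + n_local_training_samples).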
 
template <class Base>
void RBConstructionBase<Base>::generate_training_parameters_random(const Parallel::Communicator & communicator,
                                                                   std::map<std::string, bool> log_param_scale,
                                                                   std::map<std::string, std::unique_ptr<NumericVector<Number>>> & training_parameters_in,
                                                                   unsigned int n_training_samples_in,
                                                                   const RBParameters & min_parameters,
                                                                   const RBParameters & max_parameters,
                                                                   int training_parameters_random_seed,
                                                                   bool serial_training_set)
{
  const unsigned int num_params = min_parameters.n_parameters();

  training_parameters_in.clear();

  if (num_params == 0)
    return;

  // Seed the random number generator
  if (training_parameters_random_seed < 0)
    {
      if (!serial_training_set)
        {
          // Use a different seed on each processor so the local samples differ
          std::srand( static_cast<unsigned>( std::time(0)*(1+communicator.rank()) ));
        }
      else
        {
          // Use the same seed everywhere so the (redundant) serial training set
          // is identical on all processors
          unsigned int current_time = static_cast<unsigned>( std::time(0) );
          communicator.broadcast(current_time, 0);
          std::srand(current_time);
        }
    }
  else
    {
      if (!serial_training_set)
        {
          std::srand( static_cast<unsigned>( training_parameters_random_seed*(1+communicator.rank()) ));
        }
      else
        {
          std::srand( static_cast<unsigned>( training_parameters_random_seed ));
        }
    }

  // Initialize training_parameters_in: one vector per parameter
  {
    for (const auto & pr : min_parameters)
      {
        const std::string & param_name = pr.first;
        training_parameters_in[param_name] = NumericVector<Number>::build(communicator);

        if (!serial_training_set)
          {
            // Distribute the samples as evenly as possible across processors
            unsigned int n_local_training_samples;
            unsigned int quotient  = n_training_samples_in/communicator.size();
            unsigned int remainder = n_training_samples_in%communicator.size();
            if (communicator.rank() < remainder)
              n_local_training_samples = (quotient + 1);
            else
              n_local_training_samples = quotient;

            training_parameters_in[param_name]->init(n_training_samples_in, n_local_training_samples, false, PARALLEL);
          }
        else
          {
            training_parameters_in[param_name]->init(n_training_samples_in, false, SERIAL);
          }
      }
  }

  // Fill the local entries of each training vector with random samples
  for (auto & pr : training_parameters_in)
    {
      const std::string & param_name = pr.first;
      NumericVector<Number> * training_vector = pr.second.get();

      for (numeric_index_type index = training_vector->first_local_index();
           index < training_vector->last_local_index(); index++)
        {
          Real random_number = static_cast<Real>(std::rand()) / RAND_MAX; // in range [0,1]

          // Generate log10-scaled or linearly-scaled training parameters
          if (log_param_scale[param_name])
            {
              Real log_min   = log10(min_parameters.get_value(param_name));
              Real log_range = log10(max_parameters.get_value(param_name) / min_parameters.get_value(param_name));

              training_vector->set(index, pow(10., log_min + random_number*log_range ) );
            }
          else
            {
              training_vector->set(index, random_number*(max_parameters.get_value(param_name) - min_parameters.get_value(param_name))
                                   + min_parameters.get_value(param_name));
            }
        }
    }
}
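// For each local sample a uniform random number r in [0,1] is mapped to
//   10^(log10(min) + r*log10(max/min))   when log scaling is requested, or
//   min + r*(max - min)                  otherwise,
// so log-scaled parameters end up uniformly distributed in log10 space.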
 
template <class Base>
void RBConstructionBase<Base>::generate_training_parameters_deterministic(const Parallel::Communicator & communicator,
                                                                          std::map<std::string, bool> log_param_scale,
                                                                          std::map<std::string, std::unique_ptr<NumericVector<Number>>> & training_parameters_in,
                                                                          unsigned int n_training_samples_in,
                                                                          const RBParameters & min_parameters,
                                                                          const RBParameters & max_parameters,
                                                                          bool serial_training_set)
{
  const unsigned int num_params = min_parameters.n_parameters();

  if (num_params == 0)
    return;

  if (num_params > 2)
    {
      libMesh::out << "ERROR: Deterministic training sample generation "
                   << " not implemented for more than two parameters." << std::endl;
      libmesh_not_implemented();
    }

  // Delete any already-initialized training vectors
  for (auto & pr : training_parameters_in)
    pr.second.reset(nullptr);

  // Initialize training_parameters_in: one vector per parameter
  {
    for (const auto & pr : min_parameters)
      {
        const std::string & param_name = pr.first;
        training_parameters_in[param_name] = NumericVector<Number>::build(communicator);

        if (!serial_training_set)
          {
            // Distribute the samples as evenly as possible across processors
            unsigned int n_local_training_samples;
            unsigned int quotient  = n_training_samples_in/communicator.size();
            unsigned int remainder = n_training_samples_in%communicator.size();
            if (communicator.rank() < remainder)
              n_local_training_samples = (quotient + 1);
            else
              n_local_training_samples = quotient;

            training_parameters_in[param_name]->init(n_training_samples_in, n_local_training_samples, false, PARALLEL);
          }
        else
          {
            training_parameters_in[param_name]->init(n_training_samples_in, false, SERIAL);
          }
      }
  }

  if (num_params == 1)
    {
      bool use_log_scaling = log_param_scale.begin()->second;
      Real min_param = min_parameters.begin()->second;
      Real max_param = max_parameters.begin()->second;

      NumericVector<Number> * training_vector = training_parameters_in.begin()->second.get();

      for (numeric_index_type index = training_vector->first_local_index();
           index < training_vector->last_local_index(); index++)
        {
          if (use_log_scaling)
            {
              // Shift by epsilon so that log10 is well defined at the endpoints
              Real epsilon = 1.e-6;
              Real log_min   = log10(min_param + epsilon);
              Real log_range = log10( (max_param-epsilon) / (min_param+epsilon) );
              Real step_size = log_range /
                std::max((unsigned int)1,(n_training_samples_in-1));

              if (index<(n_training_samples_in-1))
                {
                  training_vector->set(index, pow(10., log_min + index*step_size ));
                }
              else
                {
                  // Pin the last sample to max_param to avoid round-off error
                  training_vector->set(index, max_param);
                }
            }
          else
            {
              Real step_size = (max_param - min_param) /
                std::max((unsigned int)1,(n_training_samples_in-1));
              training_vector->set(index, index*step_size + min_param);
            }
        }
    }

  if (num_params == 2)
    {
      // The two-parameter case samples a tensor-product grid, so the number of
      // training samples must be a perfect square
      unsigned int n_training_parameters_per_var = static_cast<unsigned int>( std::sqrt(static_cast<Real>(n_training_samples_in)) );
      if ((n_training_parameters_per_var*n_training_parameters_per_var) != n_training_samples_in)
        libmesh_error_msg("Error: Number of training parameters = "
                          << n_training_samples_in
                          << ".\n"
                          << "Deterministic training set generation with two parameters requires\n "
                          << "the number of training parameters to be a perfect square.");

      // First generate the samples for each parameter separately
      std::vector<std::vector<Real>> training_parameters_matrix(num_params);

      unsigned int i = 0;
      for (const auto & pr : min_parameters)
        {
          const std::string & param_name = pr.first;
          Real min_param = pr.second;
          Real max_param = max_parameters.get_value(param_name);
          bool use_log_scaling = log_param_scale[param_name];

          training_parameters_matrix[i].resize(n_training_parameters_per_var);

          for (unsigned int j=0; j<n_training_parameters_per_var; j++)
            {
              if (use_log_scaling)
                {
                  // Shift by epsilon so that log10 is well defined at the endpoints
                  Real epsilon = 1.e-6;
                  Real log_min   = log10(min_param + epsilon);
                  Real log_range = log10( (max_param-epsilon) / (min_param+epsilon) );
                  Real step_size = log_range /
                    std::max((unsigned int)1,(n_training_parameters_per_var-1));

                  if (j<(n_training_parameters_per_var-1))
                    {
                      training_parameters_matrix[i][j] = pow(10., log_min + j*step_size );
                    }
                  else
                    {
                      // Pin the last sample to max_param to avoid round-off error
                      training_parameters_matrix[i][j] = max_param;
                    }
                }
              else
                {
                  Real step_size = (max_param - min_param) /
                    std::max((unsigned int)1,(n_training_parameters_per_var-1));
                  training_parameters_matrix[i][j] = j*step_size + min_param;
                }
            }

          i++;
        }

      // Now load the tensor product of the two 1D sample sets into the training vectors
      std::map<std::string, std::unique_ptr<NumericVector<Number>>>::iterator new_it = training_parameters_in.begin();
      NumericVector<Number> * training_vector_0 = new_it->second.get();
      ++new_it;
      NumericVector<Number> * training_vector_1 = new_it->second.get();

      for (unsigned int index1=0; index1<n_training_parameters_per_var; index1++)
        {
          for (unsigned int index2=0; index2<n_training_parameters_per_var; index2++)
            {
              unsigned int index = index1*n_training_parameters_per_var + index2;

              if ((training_vector_0->first_local_index() <= index) &&
                  (index < training_vector_0->last_local_index()))
                {
                  training_vector_0->set(index, training_parameters_matrix[0][index1]);
                  training_vector_1->set(index, training_parameters_matrix[1][index2]);
                }
            }
        }
    }
}
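// One parameter: samples are equally spaced (in log10 space when log scaling is on,
// with the last sample pinned to max_param). Two parameters: the training set is the
// tensor-product grid, flattened as index = index1*n_training_parameters_per_var + index2,
// and each processor only sets the flattened indices it owns.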
 
template <class Base>
void RBConstructionBase<Base>::broadcast_parameters(unsigned int proc_id)
{
  libmesh_assert_less (proc_id, this->n_processors());

  // Pack the current parameter values into a vector of Reals
  RBParameters current_parameters = get_parameters();
  std::vector<Real> current_parameters_vector;

  for (const auto & pr : current_parameters)
    current_parameters_vector.push_back(pr.second);

  // Broadcast the values from proc_id to all processors
  this->comm().broadcast(current_parameters_vector, proc_id);

  // Unpack the broadcast values in the same order they were packed
  unsigned int count = 0;
  for (const auto & pr : current_parameters)
    {
      const std::string & param_name = pr.first;
      current_parameters.set_value(param_name, current_parameters_vector[count]);
      count++;
    }

  set_parameters(current_parameters);
}
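// This relies on every processor iterating current_parameters in the same (sorted map)
// order, so the packed vector of values lines up with the parameter names on all ranks.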
 
template <class Base>
void RBConstructionBase<Base>::set_training_random_seed(int seed)
{
  this->training_parameters_random_seed = seed;
}
 
#if defined(LIBMESH_HAVE_SLEPC)