libMesh
rb_construction_base.C
Go to the documentation of this file.
1 // rbOOmit: An implementation of the Certified Reduced Basis method.
2 // Copyright (C) 2009, 2010 David J. Knezevic
3 
4 // This file is part of rbOOmit.
5 
6 // rbOOmit is free software; you can redistribute it and/or
7 // modify it under the terms of the GNU Lesser General Public
8 // License as published by the Free Software Foundation; either
9 // version 2.1 of the License, or (at your option) any later version.
10 
11 // rbOOmit is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 // Lesser General Public License for more details.
15 
16 // You should have received a copy of the GNU Lesser General Public
17 // License along with this library; if not, write to the Free Software
18 // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 
20 // C++ includes
21 #include <ctime>
22 #include <cstdlib> // *must* precede <cmath> for proper std::abs() on PGI, Sun Studio CC
23 #include <cmath>
24 
25 // rbOOmit includes
26 #include "libmesh/rb_construction_base.h"
27 
28 // libMesh includes
29 #include "libmesh/libmesh_logging.h"
30 #include "libmesh/numeric_vector.h"
31 #include "libmesh/equation_systems.h"
32 #include "libmesh/parallel.h"
33 #include "libmesh/petsc_linear_solver.h"
34 #include "libmesh/condensed_eigen_system.h"
35 #include "libmesh/linear_implicit_system.h"
36 #include "libmesh/int_range.h"
37 
38 namespace libMesh
39 {
40 
41 // ------------------------------------------------------------
42 // RBConstructionBase implementation
43 
44 
// Constructor.  Initializes the base System and the RB bookkeeping flags.
// NOTE(review): the declaration line (source line 46) was lost in this
// extraction; per the symbol index it is
//   RBConstructionBase(EquationSystems & es, const std::string & name, const unsigned int number)
// -- confirm against rb_construction_base.h.
45 template <class Base>
47  const std::string & name_in,
48  const unsigned int number_in)
49  : Base(es, name_in, number_in),
50  quiet_mode(true),
51  serial_training_set(false),
52  training_parameters_initialized(false),
53  training_parameters_random_seed(-1) // by default, use std::time to seed RNG
54 {
55  // Start with an empty training set; it is built later by
56  // initialize_training_parameters() or load_training_set().
55  training_parameters.clear();
56 }
57 
// Destructor.  NOTE(review): the declaration line (source line 59) was lost
// in this extraction -- presumably RBConstructionBase<Base>::~RBConstructionBase().
58 template <class Base>
60 {
61  this->clear();
62 }
63 
// Clear all data: parent System data, parametrization data, and the
// stored training-set vectors.  NOTE(review): the declaration line
// (source line 65) was lost in this extraction -- presumably
// void RBConstructionBase<Base>::clear().
64 template <class Base>
66 {
67  // clear the parent data
68  Base::clear();
69  RBParametrized::clear();
70  training_parameters.clear();
71 }
72 
// Initialize system data.  Calls the parent's init_data() and then
// allocates a PARALLEL work vector sized like the system's dof vector.
// NOTE(review): the declaration line (source line 74) was lost in this
// extraction -- presumably void RBConstructionBase<Base>::init_data().
73 template <class Base>
75 {
76  Base::init_data();
77 
78  // Initialize the inner product storage vector, which is useful for
79  // storing intermediate results when evaluating inner products
80  inner_product_storage_vector = NumericVector<Number>::build(this->comm());
81  inner_product_storage_vector->init (this->n_dofs(), this->n_local_dofs(), false, PARALLEL);
82 }
83 
84 template <class Base>
85 void RBConstructionBase<Base>::get_global_max_error_pair(const Parallel::Communicator & communicator,
86  std::pair<numeric_index_type, Real> & error_pair)
87 {
88  // Set error_pair.second to the maximum global value and also
89  // find which processor contains the maximum value
90  unsigned int proc_ID_index;
91  communicator.maxloc(error_pair.second, proc_ID_index);
92 
93  // Then broadcast error_pair.first from proc_ID_index
94  communicator.broadcast(error_pair.first, proc_ID_index);
95 }
96 
// Return the total (global) number of training samples, or 0 if the
// training set is empty.  All parameters share the same sample count, so
// the first map entry's vector size is representative.  NOTE(review): the
// declaration line (source line 98) was lost in this extraction --
// presumably numeric_index_type RBConstructionBase<Base>::get_n_training_samples() const.
97 template <class Base>
99 {
100  libmesh_assert(training_parameters_initialized);
101 
102  if (training_parameters.empty())
103  return 0;
104 
105  return training_parameters.begin()->second->size();
106 }
107 
// Return the number of training samples stored on this processor.
// NOTE(review): the declaration line (source line 109) was lost in this
// extraction.
108 template <class Base>
110 {
111  libmesh_assert(training_parameters_initialized);
112  return training_parameters.begin()->second->local_size();
113 }
114 
// Return the index of the first training sample local to this processor.
// NOTE(review): the declaration line (source line 116) was lost in this
// extraction.
115 template <class Base>
117 {
118  libmesh_assert(training_parameters_initialized);
119  return training_parameters.begin()->second->first_local_index();
120 }
121 
// Return one past the index of the last training sample local to this
// processor (i.e. an exclusive upper bound, matching NumericVector's
// last_local_index convention).  NOTE(review): the declaration line
// (source line 123) was lost in this extraction.
122 template <class Base>
124 {
125  libmesh_assert(training_parameters_initialized);
126  return training_parameters.begin()->second->last_local_index();
127 }
128 
// Set the current parameters to the training-set sample at the given
// (locally owned) index.  NOTE(review): the declaration line (source
// line 130) was lost in this extraction.
129 template <class Base>
131 {
132  set_parameters(get_params_from_training_set(index));
133 }
134 
// Build and return the RBParameters object for the training sample at
// "index".  The index must be locally owned (see the assert below); any
// "extra" (non-training) values from the currently-set parameters are
// carried over unchanged.  NOTE(review): the declaration line (source
// line 136) was lost in this extraction.
135 template <class Base>
137 {
138  libmesh_assert(training_parameters_initialized);
139 
140  libmesh_assert( (this->get_first_local_training_index() <= index) &&
141  (index < this->get_last_local_training_index()) );
142 
143  RBParameters params;
144  for (const auto & pr : training_parameters)
145  {
146  const std::string & param_name = pr.first;
147  Real param_value = libmesh_real((*(pr.second))(index));
148  params.set_value(param_name, param_value);
149  }
150 
151  // Add potential extra values
152  const auto & mine = get_parameters();
153  for (const auto & pr : as_range(mine.extra_begin(), mine.extra_end()))
154  params.set_extra_value(pr.first, pr.second);
155 
156  return params;
157 }
158 
// Set the parameters from a (possibly remote) training-set index: the
// processor that owns "index" sets its parameters locally, then all
// processors agree on that owner via a max-reduction on the rank and the
// owner broadcasts the parameters.  NOTE(review): the declaration line
// (source line 160) was lost in this extraction.
159 template <class Base>
161 {
162  libmesh_assert(training_parameters_initialized);
163 
164  processor_id_type root_id = 0;
165  if ((this->get_first_local_training_index() <= index) &&
166  (index < this->get_last_local_training_index()))
167  {
168  // Set parameters on only one processor
169  set_params_from_training_set(index);
170 
171  // set root_id, only non-zero on one processor
172  root_id = this->processor_id();
173  }
174 
175  // broadcast
176  this->comm().max(root_id);
177  broadcast_parameters(root_id);
178 }
179 
// Build the training set, either on a deterministic grid or by random
// sampling, then snap any discrete parameters to their nearest allowed
// values.  NOTE(review): the declaration line (source line 181, carrying
// the function name and the first argument, presumably
// "const RBParameters & mu_min") was lost in this extraction -- confirm
// against rb_construction_base.h.
180 template <class Base>
182  const RBParameters & mu_max,
183  unsigned int n_training_samples,
184  std::map<std::string,bool> log_param_scale,
185  bool deterministic)
186 {
187  if(!is_quiet())
188  {
189  // Print out some info about the training set initialization
190  libMesh::out << "Initializing training parameters with "
191  << (deterministic ? "deterministic " : "random " )
192  << "training set..." << std::endl;
193 
194  for (const auto & pr : log_param_scale)
195  libMesh::out << "Parameter "
196  << pr.first
197  << ": log scaling = "
198  << pr.second
199  << std::endl;
200 
201  libMesh::out << std::endl;
202  }
203 
204  if (deterministic)
205  {
206  generate_training_parameters_deterministic(this->comm(),
207  log_param_scale,
208  training_parameters,
209  n_training_samples,
210  mu_min,
211  mu_max,
212  serial_training_set);
213  }
214  else
215  {
216  // Generate random training samples for all parameters
217  generate_training_parameters_random(this->comm(),
218  log_param_scale,
219  training_parameters,
220  n_training_samples,
221  mu_min,
222  mu_max,
223  this->training_parameters_random_seed,
224  serial_training_set);
225  }
226 
227  // For each parameter that only allows discrete values, we "snap" to the nearest
228  // allowable discrete value
229  if (get_n_discrete_params() > 0)
230  {
231  for (const auto & pr : training_parameters)
232  {
233  const std::string & param_name = pr.first;
234  if (is_discrete_parameter(param_name))
235  {
236  std::vector<Real> discrete_values =
237  get_discrete_parameter_values().find(param_name)->second;
238 
239  NumericVector<Number> * training_vector = pr.second.get();
240 
241  // Snap only the locally-owned entries; each processor owns a
242  // contiguous [first_local_index, last_local_index) range.
241  for (numeric_index_type index=training_vector->first_local_index();
242  index<training_vector->last_local_index();
243  index++)
244  {
245  Real value = libmesh_real((*training_vector)(index));
246  Real nearest_discrete_value = get_closest_value(value, discrete_values);
247  training_vector->set(index, nearest_discrete_value);
248  }
249  }
250  }
251  }
252 
253  training_parameters_initialized = true;
254 }
255 
256 template <class Base>
257 void RBConstructionBase<Base>::load_training_set(std::map<std::string, std::vector<Number>> & new_training_set)
258 {
259  // First, make sure that an initial training set has already been
260  // generated
261  if (!training_parameters_initialized)
262  libmesh_error_msg("Error: load_training_set cannot be used to initialize parameters");
263 
264  // Make sure that the training set has the correct number of parameters
265  if (new_training_set.size() != get_n_params())
266  libmesh_error_msg("Error: Incorrect number of parameters in load_training_set.");
267 
268  // Delete the training set vectors (but don't remove the existing keys!)
269  for (auto & pr : training_parameters)
270  pr.second.reset(nullptr);
271 
272  // Get the number of local and global training parameters
273  numeric_index_type n_local_training_samples =
274  cast_int<numeric_index_type>(new_training_set.begin()->second.size());
275  numeric_index_type n_global_training_samples = n_local_training_samples;
276  this->comm().sum(n_global_training_samples);
277 
278  for (auto & pr : training_parameters)
279  {
280  pr.second = NumericVector<Number>::build(this->comm());
281  pr.second->init(n_global_training_samples, n_local_training_samples, false, PARALLEL);
282  }
283 
284  for (auto & pr : training_parameters)
285  {
286  const std::string & param_name = pr.first;
287  NumericVector<Number> * training_vector = pr.second.get();
288 
289  numeric_index_type first_index = training_vector->first_local_index();
290  for (numeric_index_type i=0; i<n_local_training_samples; i++)
291  {
292  numeric_index_type index = first_index + i;
293  training_vector->set(index, new_training_set[param_name][i]);
294  }
295  }
296 }
297 
298 
299 template <class Base>
300 void RBConstructionBase<Base>::generate_training_parameters_random(const Parallel::Communicator & communicator,
301  std::map<std::string, bool> log_param_scale,
302  std::map<std::string, std::unique_ptr<NumericVector<Number>>> & training_parameters_in,
303  unsigned int n_training_samples_in,
304  const RBParameters & min_parameters,
305  const RBParameters & max_parameters,
306  int training_parameters_random_seed,
307  bool serial_training_set)
308 {
309  libmesh_assert_equal_to ( min_parameters.n_parameters(), max_parameters.n_parameters() );
310  const unsigned int num_params = min_parameters.n_parameters();
311 
312  // Clear training_parameters_in
313  training_parameters_in.clear();
314 
315  if (num_params == 0)
316  return;
317 
318  if (training_parameters_random_seed < 0)
319  {
320  if (!serial_training_set)
321  {
322  // seed the random number generator with the system time
323  // and the processor ID so that the seed is different
324  // on different processors
325  std::srand( static_cast<unsigned>( std::time(0)*(1+communicator.rank()) ));
326  }
327  else
328  {
329  // seed the random number generator with the system time
330  // only so that the seed is the same on all processors
331  //
332  // Note that we broadcast the time on processor 0 to make
333  // sure all processors agree.
334  unsigned int current_time = static_cast<unsigned>( std::time(0) );
335  communicator.broadcast(current_time, 0);
336  std::srand(current_time);
337  }
338  }
339  else
340  {
341  if (!serial_training_set)
342  {
343  // seed the random number generator with the provided value
344  // and the processor ID so that the seed is different
345  // on different processors
346  std::srand( static_cast<unsigned>( training_parameters_random_seed*(1+communicator.rank()) ));
347  }
348  else
349  {
350  // seed the random number generator with the provided value
351  // so that the seed is the same on all processors
352  std::srand( static_cast<unsigned>( training_parameters_random_seed ));
353  }
354  }
355 
356  // initialize training_parameters_in
357  for (const auto & pr : min_parameters)
358  {
359  const std::string & param_name = pr.first;
360  training_parameters_in[param_name] = NumericVector<Number>::build(communicator);
361 
362  if (!serial_training_set)
363  {
364  // Calculate the number of training parameters local to this processor
365  unsigned int n_local_training_samples;
366  unsigned int quotient = n_training_samples_in/communicator.size();
367  unsigned int remainder = n_training_samples_in%communicator.size();
368  if (communicator.rank() < remainder)
369  n_local_training_samples = (quotient + 1);
370  else
371  n_local_training_samples = quotient;
372 
373  training_parameters_in[param_name]->init(n_training_samples_in, n_local_training_samples, false, PARALLEL);
374  }
375  else
376  {
377  training_parameters_in[param_name]->init(n_training_samples_in, false, SERIAL);
378  }
379  }
380 
381  // finally, set the values
382  for (auto & pr : training_parameters_in)
383  {
384  const std::string & param_name = pr.first;
385  NumericVector<Number> * training_vector = pr.second.get();
386 
387  numeric_index_type first_index = training_vector->first_local_index();
388  for (auto i : IntRange<numeric_index_type>(0, training_vector->local_size()))
389  {
390  numeric_index_type index = first_index + i;
391  Real random_number = static_cast<Real>(std::rand()) / RAND_MAX; // in range [0,1]
392 
393  // Generate log10 scaled training parameters
394  if (log_param_scale[param_name])
395  {
396  Real log_min = log10(min_parameters.get_value(param_name));
397  Real log_range = log10(max_parameters.get_value(param_name) / min_parameters.get_value(param_name));
398 
399  training_vector->set(index, pow(10., log_min + random_number*log_range ) );
400  }
401  // Generate linearly scaled training parameters
402  else
403  {
404  training_vector->set(index, random_number*(max_parameters.get_value(param_name) - min_parameters.get_value(param_name))
405  + min_parameters.get_value(param_name));
406  }
407  }
408  }
409 }
410 
411 template <class Base>
412 void RBConstructionBase<Base>::generate_training_parameters_deterministic(const Parallel::Communicator & communicator,
413  std::map<std::string, bool> log_param_scale,
414  std::map<std::string, std::unique_ptr<NumericVector<Number>>> & training_parameters_in,
415  unsigned int n_training_samples_in,
416  const RBParameters & min_parameters,
417  const RBParameters & max_parameters,
418  bool serial_training_set)
419 {
420  libmesh_assert_equal_to ( min_parameters.n_parameters(), max_parameters.n_parameters() );
421  const unsigned int num_params = min_parameters.n_parameters();
422 
423  if (num_params == 0)
424  return;
425 
426  if (num_params > 2)
427  {
428  libMesh::out << "ERROR: Deterministic training sample generation "
429  << " not implemented for more than two parameters." << std::endl;
430  libmesh_not_implemented();
431  }
432 
433  // Clear training_parameters_in (but don't remove existing keys!)
434  for (auto & pr : training_parameters_in)
435  pr.second.reset(nullptr);
436 
437  // Initialize training_parameters_in
438  for (const auto & pr : min_parameters)
439  {
440  const std::string & param_name = pr.first;
441  training_parameters_in[param_name] = NumericVector<Number>::build(communicator);
442 
443  if (!serial_training_set)
444  {
445  // Calculate the number of training parameters local to this processor
446  unsigned int n_local_training_samples;
447  unsigned int quotient = n_training_samples_in/communicator.size();
448  unsigned int remainder = n_training_samples_in%communicator.size();
449  if (communicator.rank() < remainder)
450  n_local_training_samples = (quotient + 1);
451  else
452  n_local_training_samples = quotient;
453 
454  training_parameters_in[param_name]->init(n_training_samples_in, n_local_training_samples, false, PARALLEL);
455  }
456  else
457  {
458  training_parameters_in[param_name]->init(n_training_samples_in, false, SERIAL);
459  }
460  }
461 
462  if (num_params == 1)
463  {
464  NumericVector<Number> * training_vector = training_parameters_in.begin()->second.get();
465  bool use_log_scaling = log_param_scale.begin()->second;
466  Real min_param = min_parameters.begin()->second;
467  Real max_param = max_parameters.begin()->second;
468 
469  numeric_index_type first_index = training_vector->first_local_index();
470  for (auto i : IntRange<numeric_index_type>(0, training_vector->local_size()))
471  {
472  numeric_index_type index = first_index+i;
473  if (use_log_scaling)
474  {
475  Real epsilon = 1.e-6; // Prevent rounding errors triggering asserts
476  Real log_min = log10(min_param + epsilon);
477  Real log_range = log10( (max_param-epsilon) / (min_param+epsilon) );
478  Real step_size = log_range /
479  std::max((unsigned int)1,(n_training_samples_in-1));
480 
481  if (index<(n_training_samples_in-1))
482  {
483  training_vector->set(index, pow(10., log_min + index*step_size ));
484  }
485  else
486  {
487  // due to rounding error, the last parameter can be slightly
488  // bigger than max_parameters, hence snap back to the max
489  training_vector->set(index, max_param);
490  }
491  }
492  else
493  {
494  // Generate linearly scaled training parameters
495  Real step_size = (max_param - min_param) /
496  std::max((unsigned int)1,(n_training_samples_in-1));
497  training_vector->set(index, index*step_size + min_param);
498  }
499  }
500  }
501 
502 
503  // This is for two parameters
504  if (num_params == 2)
505  {
506  // First make sure n_training_samples_in is a square number
507  unsigned int n_training_parameters_per_var = static_cast<unsigned int>( std::sqrt(static_cast<Real>(n_training_samples_in)) );
508  if ((n_training_parameters_per_var*n_training_parameters_per_var) != n_training_samples_in)
509  libmesh_error_msg("Error: Number of training parameters = " \
510  << n_training_samples_in \
511  << ".\n" \
512  << "Deterministic training set generation with two parameters requires\n " \
513  << "the number of training parameters to be a perfect square.");
514 
515  // make a matrix to store all the parameters, put them in vector form afterwards
516  std::vector<std::vector<Real>> training_parameters_matrix(num_params);
517 
518  unsigned int i = 0;
519  for (const auto & pr : min_parameters)
520  {
521  const std::string & param_name = pr.first;
522  Real min_param = pr.second;
523  bool use_log_scaling = log_param_scale[param_name];
524  Real max_param = max_parameters.get_value(param_name);
525 
526  training_parameters_matrix[i].resize(n_training_parameters_per_var);
527 
528  for (unsigned int j=0; j<n_training_parameters_per_var; j++)
529  {
530  // Generate log10 scaled training parameters
531  if (use_log_scaling)
532  {
533  Real epsilon = 1.e-6; // Prevent rounding errors triggering asserts
534  Real log_min = log10(min_param + epsilon);
535  Real log_range = log10( (max_param-epsilon) / (min_param+epsilon) );
536  Real step_size = log_range /
537  std::max((unsigned int)1,(n_training_parameters_per_var-1));
538 
539  if (j<(n_training_parameters_per_var-1))
540  {
541  training_parameters_matrix[i][j] = pow(10., log_min + j*step_size );
542  }
543  else
544  {
545  // due to rounding error, the last parameter can be slightly
546  // bigger than max_parameters, hence snap back to the max
547  training_parameters_matrix[i][j] = max_param;
548  }
549  }
550  else
551  {
552  // Generate linearly scaled training parameters
553  Real step_size = (max_param - min_param) /
554  std::max((unsigned int)1,(n_training_parameters_per_var-1));
555  training_parameters_matrix[i][j] = j*step_size + min_param;
556  }
557 
558  }
559  i++;
560  }
561 
562  // now load into training_samples_in:
563  std::map<std::string, std::unique_ptr<NumericVector<Number>>>::iterator new_it = training_parameters_in.begin();
564 
565  NumericVector<Number> * training_vector_0 = new_it->second.get();
566  ++new_it;
567  NumericVector<Number> * training_vector_1 = new_it->second.get();
568 
569  for (unsigned int index1=0; index1<n_training_parameters_per_var; index1++)
570  {
571  for (unsigned int index2=0; index2<n_training_parameters_per_var; index2++)
572  {
573  unsigned int index = index1*n_training_parameters_per_var + index2;
574 
575  if ((training_vector_0->first_local_index() <= index) &&
576  (index < training_vector_0->last_local_index()))
577  {
578  training_vector_0->set(index, training_parameters_matrix[0][index1]);
579  training_vector_1->set(index, training_parameters_matrix[1][index2]);
580  }
581  }
582  }
583 
584  // libMesh::out << "n_training_samples = " << n_training_samples_in << std::endl;
585  // for (unsigned int index=0; index<n_training_samples_in; index++)
586  // {
587  // libMesh::out << "training parameters for index="<<index<<":"<<std::endl;
588  // for (unsigned int param=0; param<num_params; param++)
589  // {
590  // libMesh::out << " " << (*training_parameters_in[param])(index);
591  // }
592  // libMesh::out << std::endl << std::endl;
593  // }
594 
595  }
596 }
597 
598 
// Broadcast the current parameters from processor proc_id to all
// processors, then set them globally.  NOTE(review): the declaration line
// (source line 600) was lost in this extraction -- presumably
// void RBConstructionBase<Base>::broadcast_parameters(unsigned int proc_id).
599 template <class Base>
601 {
602  libmesh_assert_less (proc_id, this->n_processors());
603 
604  // create a copy of the current parameters
605  RBParameters current_parameters = get_parameters();
606 
607  // copy current_parameters to current_parameters_vector in order to broadcast
608  std::vector<Real> current_parameters_vector;
609 
610  for (const auto & pr : current_parameters)
611  current_parameters_vector.push_back(pr.second);
612 
613  // do the broadcast
614  this->comm().broadcast(current_parameters_vector, proc_id);
615 
616  // update the copy of the RBParameters object
617  // (relies on every processor iterating the parameters in the same order)
617  unsigned int count = 0;
618  for (const auto & pr : current_parameters)
619  {
620  const std::string & param_name = pr.first;
621  current_parameters.set_value(param_name, current_parameters_vector[count]);
622  count++;
623  }
624 
625  // set the parameters globally
626  set_parameters(current_parameters);
627 }
628 
// Store the seed used by generate_training_parameters_random(); a negative
// value (the default) means "seed from the wall clock".  NOTE(review): the
// declaration line (source line 630) was lost in this extraction --
// presumably void RBConstructionBase<Base>::set_training_random_seed(int seed).
629 template <class Base>
631 {
632  this->training_parameters_random_seed = seed;
633 }
634 
635 // Template specializations
636 
637 // EigenSystem is only defined if we have SLEPc
638 #if defined(LIBMESH_HAVE_SLEPC)
640 #endif
641 
643 
644 } // namespace libMesh
libMesh::RBParameters::set_extra_value
void set_extra_value(const std::string &param_name, Real value)
Set the value of the specified extra parameter.
Definition: rb_parameters.C:58
libMesh::PARALLEL
Definition: enum_parallel_type.h:36
libMesh::RBParameters::begin
const_iterator begin() const
Get const_iterator access to the parameters stored in this RBParameters object.
Definition: rb_parameters.C:89
libMesh::libmesh_real
T libmesh_real(T a)
Definition: libmesh_common.h:166
libMesh::SERIAL
Definition: enum_parallel_type.h:35
libMesh::NumericVector::last_local_index
virtual numeric_index_type last_local_index() const =0
libMesh
The libMesh namespace provides an interface to certain functionality in the library.
Definition: factoryfunction.C:55
std::sqrt
MetaPhysicL::DualNumber< T, D > sqrt(const MetaPhysicL::DualNumber< T, D > &in)
libMesh::RBParameters
This class is part of the rbOOmit framework.
Definition: rb_parameters.h:42
libMesh::RBParameters::n_parameters
unsigned int n_parameters() const
Get the number of parameters that have been added.
Definition: rb_parameters.C:63
libMesh::NumericVector
Provides a uniform interface to vector storage schemes for different linear algebra libraries.
Definition: vector_fe_ex5.C:43
libMesh::libmesh_assert
libmesh_assert(ctx)
libMesh::IntRange
The IntRange templated class is intended to make it easy to loop over integers which are indices of a...
Definition: int_range.h:53
libMesh::processor_id_type
uint8_t processor_id_type
Definition: id_types.h:104
libMesh::RBConstructionBase
This class is part of the rbOOmit framework.
Definition: rb_construction_base.h:54
libMesh::RBParameters::set_value
void set_value(const std::string &param_name, Real value)
Set the value of the specified parameter.
Definition: rb_parameters.C:47
std::pow
double pow(double a, int b)
Definition: libmesh_augment_std_namespace.h:58
libMesh::NumericVector::local_size
virtual numeric_index_type local_size() const =0
libMesh::as_range
SimpleRange< IndexType > as_range(const std::pair< IndexType, IndexType > &p)
Helper function that allows us to treat a homogenous pair as a range.
Definition: simple_range.h:57
libMesh::RBConstructionBase::RBConstructionBase
RBConstructionBase(EquationSystems &es, const std::string &name, const unsigned int number)
Constructor.
Definition: rb_construction_base.C:46
libMesh::numeric_index_type
dof_id_type numeric_index_type
Definition: id_types.h:99
libMesh::EquationSystems
This is the EquationSystems class.
Definition: equation_systems.h:74
libMesh::NumericVector::set
virtual void set(const numeric_index_type i, const T value)=0
Sets v(i) = value.
value
static const bool value
Definition: xdr_io.C:56
libMesh::NumericVector::get
virtual void get(const std::vector< numeric_index_type > &index, T *values) const
Access multiple components at once.
Definition: numeric_vector.h:821
libMesh::Real
DIE A HORRIBLE DEATH HERE typedef LIBMESH_DEFAULT_SCALAR_TYPE Real
Definition: libmesh_common.h:121
libMesh::out
OStreamProxy out
libMesh::RBParameters::get_value
Real get_value(const std::string &param_name) const
Get the value of the specific parameter.
Definition: rb_parameters.C:41
libMesh::NumericVector::first_local_index
virtual numeric_index_type first_local_index() const =0