LCOV - code coverage report
Current view: top level - include/reduced_basis - rb_construction_base.h (source / functions) Hit Total Coverage
Test: libMesh/libmesh: #4229 (6a9aeb) with base 727f46 Lines: 5 6 83.3 %
Date: 2025-08-19 19:27:09 Functions: 3 15 20.0 %
Legend: Lines: hit not hit

          Line data    Source code
       1             : // rbOOmit: An implementation of the Certified Reduced Basis method.
       2             : // Copyright (C) 2009, 2010 David J. Knezevic
       3             : 
       4             : // This file is part of rbOOmit.
       5             : 
       6             : // rbOOmit is free software; you can redistribute it and/or
       7             : // modify it under the terms of the GNU Lesser General Public
       8             : // License as published by the Free Software Foundation; either
       9             : // version 2.1 of the License, or (at your option) any later version.
      10             : 
      11             : // rbOOmit is distributed in the hope that it will be useful,
      12             : // but WITHOUT ANY WARRANTY; without even the implied warranty of
      13             : // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
      14             : // Lesser General Public License for more details.
      15             : 
      16             : // You should have received a copy of the GNU Lesser General Public
      17             : // License along with this library; if not, write to the Free Software
      18             : // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
      19             : 
      20             : #ifndef LIBMESH_RB_CONSTRUCTION_BASE_H
      21             : #define LIBMESH_RB_CONSTRUCTION_BASE_H
      22             : 
      23             : // rbOOmit includes
      24             : #include "libmesh/id_types.h"
      25             : #include "libmesh/rb_parametrized.h"
      26             : #include "libmesh/rb_theta_expansion.h"
      27             : #include "libmesh/rb_theta.h"
      28             : 
      29             : // libMesh includes
      30             : #include "libmesh/system.h"
      31             : #include "libmesh/numeric_vector.h"
      32             : #include "libmesh/linear_solver.h"
      33             : #include "libmesh/perf_log.h"
      34             : 
      35             : // C++ includes
      36             : #include <set>
      37             : 
      38             : namespace libMesh
      39             : {
      40             : 
      41             : /**
      42             :  * This class is part of the rbOOmit framework.
      43             :  *
      44             :  * This is the base class for the Construction stage
      45             :  * of the certified reduced basis (RB) method.
      46             :  * We template the Base class so that we can derive from
      47             :  * the appropriate libMesh System type (e.g. LinearImplicitSystem
      48             :  * for standard reduced basis, EigenSystem for SCM)
      49             :  * at compile time.
      50             :  *
      51             :  * \author David J. Knezevic
      52             :  * \date 2009
      53             :  */
       54             : template<class Base>
       55          48 : class RBConstructionBase : public Base, public RBParametrized
       56             : {
       57             : public:
       58             : 
       59             :   /**
       60             :    * Constructor.
       61             :    */
       62             :   RBConstructionBase (EquationSystems & es,
       63             :                       const std::string & name,
       64             :                       const unsigned int number);
       65             : 
       66             :   /**
       67             :    * Special functions.
       68             :    * - This class has the same restrictions as the union of its
       69             :    *   potential base classes (currently LinearImplicitSystem and
       70             :    *   EigenSystem).
       71             :    * - Destructor is defaulted out-of-line.
       72             :    */
       73             :   RBConstructionBase (RBConstructionBase &&) = default;
       74             :   RBConstructionBase & operator= (RBConstructionBase &&) = delete;
       75             :   RBConstructionBase (const RBConstructionBase &) = delete;
       76             :   RBConstructionBase & operator= (const RBConstructionBase &) = delete;
       77             :   virtual ~RBConstructionBase ();
       78             : 
       79             :   /**
       80             :    * The type of system.
       81             :    */
       82             :   typedef RBConstructionBase<Base> sys_type;
       83             : 
       84             :   /**
       85             :    * \returns A reference to *this.
       86             :    */
       87           0 :   sys_type & system () { return *this; }
       88             : 
       89             :   /**
       90             :    * Clear all the data structures associated with
       91             :    * the system.
       92             :    */
       93             :   virtual void clear ();
       94             : 
       95             :   /**
       96             :    * Set the quiet_mode flag. If quiet == false then
       97             :    * we print out a lot of extra information
       98             :    * during the Offline stage.
       99             :    */
      100           8 :   void set_quiet_mode(bool quiet_mode_in)
      101         284 :   { this->quiet_mode = quiet_mode_in; }
      102             : 
      103             :   /**
      104             :    * Is the system in quiet mode?
      105             :    */
      106         972 :   bool is_quiet() const
      107       33427 :   { return this->quiet_mode; }
      108             : 
      109             :   /**
      110             :    * Set the boolean option that indicates whether or not we
      111             :    * normalize solution snapshots (see _normalize_solution_snapshots).
      112             :    */
      113             :   void set_normalize_solution_snapshots(bool value);
      114             : 
      115             :   /**
      116             :    * Get the number of global training samples.
      117             :    */
      118             :   numeric_index_type get_n_training_samples() const;
      119             : 
      120             :   /**
      121             :    * Get the total number of training samples local to this processor.
      122             :    */
      123             :   numeric_index_type get_local_n_training_samples() const;
      124             : 
      125             :   /**
      126             :    * Get the first local index of the training parameters.
      127             :    */
      128             :   numeric_index_type get_first_local_training_index() const;
      129             : 
      130             :   /**
      131             :    * Get the last local index of the training parameters.
      132             :    */
      133             :   numeric_index_type get_last_local_training_index() const;
      134             : 
      135             :   /**
      136             :    * Initialize the parameter ranges and indicate whether deterministic
      137             :    * or random training parameters should be used and whether or
      138             :    * not we want the parameters to be scaled logarithmically.
      139             :    * \p n_global_training_samples is the total number of samples to
      140             :    * generate, which will be distributed across all the processors.
      141             :    */
      142             :   virtual void initialize_training_parameters(const RBParameters & mu_min,
      143             :                                               const RBParameters & mu_max,
      144             :                                               const unsigned int n_global_training_samples,
      145             :                                               const std::map<std::string, bool> & log_param_scale,
      146             :                                               const bool deterministic=true);
      147             : 
      148             :   /**
      149             :    * Overwrite the training parameters with new_training_set.
      150             :    * This training set is assumed to contain only the samples local to this processor.
      151             :    */
      152             :   virtual void load_training_set(const std::map<std::string, std::vector<RBParameter>> & new_training_set);
      153             : 
      154             :   /**
      155             :    * Overwrite the local training samples for \p param_name using \p values.
      156             :    * This assumes that values.size() matches get_local_n_training_samples().
      157             :    */
      158             :   void set_training_parameter_values(const std::string & param_name, const std::vector<RBParameter> & values);
      159             : 
      160             :   /**
      161             :    * Broadcasts parameters from processor proc_id to all processors.
      162             :    * This broadcasts the RBParameters object from .get_parameters(),
      163             :    * and then sets it on all processors with .set_parameters().
      164             :    */
      165             :   void broadcast_parameters(const unsigned int proc_id);
      166             : 
      167             :   /**
      168             :    * Set the seed that is used to randomly generate training parameters.
      169             :    */
      170             :   void set_training_random_seed(int seed);
      171             : 
      172             :   /**
      173             :    * In some cases we only want to allow discrete parameter values, instead
      174             :    * of parameters that may take any value in a specified interval: the
      175             :    * discrete values allowed for parameter \p mu in the training set must
      176             :    * be specified before the training set is generated. NOTE(review): no
      177             :    * declaration follows this comment -- confirm whether the setter it
      178             :    * describes was removed, then delete or restore accordingly.
      179             :    */
      180             : 
      181             :   /**
      182             :    * Set the name of the parameter that we will generate deterministic training parameters for.
      183             :    * Defaults to "NONE".
      184             :    */
      185             :   void set_deterministic_training_parameter_name(const std::string & name);
      186             : 
      187             :   /**
      188             :    * Get the name of the parameter that we will generate deterministic training parameters for.
      189             :    */
      190             :   const std::string & get_deterministic_training_parameter_name() const;
      191             : 
      192             :   /**
      193             :    * Static helper function for generating a randomized set of parameters.
      194             :    * The parameter \p n_global_training_samples_in is the total number of parameters to
      195             :    * generate, and they will be split across all the processors (unless serial_training_set=true)
      196             :    * in the \p local_training_parameters_in map.
      197             :    * \return a pair of {first_local_index,last_local_index}
      198             :    */
      199             :   static std::pair<std::size_t, std::size_t>
      200             :   generate_training_parameters_random(const Parallel::Communicator & communicator,
      201             :                                       const std::map<std::string, bool> & log_param_scale,
      202             :                                       std::map<std::string, std::vector<RBParameter>> & local_training_parameters_in,
      203             :                                       const unsigned int n_global_training_samples_in,
      204             :                                       const RBParameters & min_parameters,
      205             :                                       const RBParameters & max_parameters,
      206             :                                       const int training_parameters_random_seed=-1,
      207             :                                       const bool serial_training_set=false);
      208             : 
      209             :   /**
      210             :    * Static helper function for generating a deterministic set of parameters. Only works with 1 or 2
      211             :    * parameters (as defined by the lengths of min/max parameters vectors), otherwise throws an error.
      212             :    * The parameter \p n_global_training_samples_in is the total number of parameters to
      213             :    * generate, and they will be split across all the processors (unless serial_training_set=true)
      214             :    * in the \p local_training_parameters_in map.
      215             :    * \return a pair of {first_local_index,last_local_index}
      216             :    */
      217             :   static std::pair<std::size_t, std::size_t>
      218             :   generate_training_parameters_deterministic(const Parallel::Communicator & communicator,
      219             :                                              const std::map<std::string, bool> & log_param_scale,
      220             :                                              std::map<std::string, std::vector<RBParameter>> & local_training_parameters_in,
      221             :                                              const unsigned int n_global_training_samples_in,
      222             :                                              const RBParameters & min_parameters,
      223             :                                              const RBParameters & max_parameters,
      224             :                                              const bool serial_training_set=false);
      225             : 
      226             : protected:
      227             : 
      228             :   /**
      229             :    * Initializes the member data fields associated with
      230             :    * the system, so that, e.g., \p assemble() may be used.
      231             :    */
      232             :   virtual void init_data ();
      233             : 
      234             :   /**
      235             :    * Return the RBParameters in index \p global_index of the global training set.
      236             :    * NOTE(review): RBParameters supports loading the full sample set, so the
      237             :    * per-index access here may only be retained to limit memory use -- confirm.
      238             :    */
      239             :   RBParameters get_params_from_training_set(unsigned int global_index);
      240             : 
      241             :   /**
      242             :    * Set parameters to the RBParameters stored in index \p global_index of the global training set.
      243             :    */
      244             :   void set_params_from_training_set(unsigned int global_index);
      245             : 
      246             :   /**
      247             :    * Load the specified training parameter and then broadcast to all processors.
      248             :    */
      249             :   virtual void set_params_from_training_set_and_broadcast(unsigned int global_index);
      250             : 
      251             :   /**
      252             :    * Static function to return the error pair (index,error)
      253             :    * that corresponds to the largest error across all
      254             :    * processors.
      255             :    */
      256             :   static void get_global_max_error_pair(const Parallel::Communicator & communicator,
      257             :                                         std::pair<numeric_index_type, Real> & error_pair);
      258             : 
      259             : 
      260             :   //----------- PROTECTED DATA MEMBERS -----------//
      261             : 
      262             :   /**
      263             :    * Flag to indicate whether we print out extra information during
      264             :    * the Offline stage.
      265             :    */
      266             :   bool quiet_mode;
      267             : 
      268             :   /**
      269             :    * This boolean flag indicates whether or not the training set should
      270             :    * be the same on all processors. By default it is false, but in the
      271             :    * case of the Empirical Interpolation Method (RBEIMConstruction),
      272             :    * for example, we need the training set to be identical on all processors.
      273             :    */
      274             :   bool serial_training_set;
      275             : 
      276             :   /**
      277             :    * Set this boolean to true if we want to normalize solution snapshots
      278             :    * used in training to have norm of 1. This is relevant if snapshots
      279             :    * have differing magnitudes and we want to approximate them all with
      280             :    * equal accuracy.
      281             :    */
      282             :   bool _normalize_solution_snapshots;
      283             : 
      284             :   /**
      285             :    * We keep an extra temporary vector that is useful for
      286             :    * performing inner products (avoids unnecessary memory
      287             :    * allocation/deallocation).
      288             :    */
      289             :   std::unique_ptr<NumericVector<Number>> inner_product_storage_vector;
      290             : 
      291             : 
      292             : private:
      293             : 
      294             :   /**
      295             :    * Boolean flag to indicate whether or not the
      296             :    * parameter ranges have been initialized.
      297             :    */
      298             :   bool _training_parameters_initialized;
      299             : 
      300             :   /**
      301             :    * The training samples for each parameter.
      302             :    * When serial_training_set is true, the map contains all samples of all parameters.
      303             :    * Otherwise, the sample vectors will only contain the values for the local samples
      304             :    * as defined by _first_local_index and _n_local_training_samples.
      305             :    * Mapped from parameter_name -> sample_vector -> value_vector.
      306             :    */
      307             :   std::map<std::string, std::vector<RBParameter>> _training_parameters;
      308             : 
      309             :   /**
      310             :    * The first sample-vector index from the global vector which is stored in
      311             :    * the _training_parameters on this processor.
      312             :    * _n_local_training_samples is equivalent to the .size() of any vector in _training_parameters.
      313             :    */
      314             :   numeric_index_type _first_local_index;
      315             :   numeric_index_type _n_local_training_samples;
      316             :   numeric_index_type _n_global_training_samples;
      317             : 
      318             :   /**
      319             :    * If < 0, use std::time() * processor_id() to seed the random
      320             :    * number generator for the training parameters (default).  If
      321             :    * >= 0, use the provided value * processor_id() as the random
      322             :    * number generator seed.
      323             :    */
      324             :   int _training_parameters_random_seed;
      325             : };
     325             : 
     326             : } // namespace libMesh
     327             : 
     328             : 
     329             : #endif // LIBMESH_RB_CONSTRUCTION_BASE_H

Generated by: LCOV version 1.14