https://mooseframework.inl.gov
ActiveLearningGaussianProcess.C
Go to the documentation of this file.
1 //* This file is part of the MOOSE framework
2 //* https://mooseframework.inl.gov
3 //*
4 //* All rights reserved, see COPYRIGHT for full restrictions
5 //* https://github.com/idaholab/moose/blob/master/COPYRIGHT
6 //*
7 //* Licensed under LGPL 2.1, please see LICENSE for details
8 //* https://www.gnu.org/licenses/lgpl-2.1.html
9 
11 
#include "ActiveLearningGaussianProcess.h"

#include <petsctao.h>
#include <petscdmda.h>

#include <math.h>

#include "libmesh/petsc_matrix.h"
#include "libmesh/petsc_vector.h"
19 
21 
24 {
26  params.addClassDescription(
27  "Permit re-training Gaussian Process surrogate model for active learning.");
28  params.addRequiredParam<UserObjectName>("covariance_function", "Name of covariance function.");
29  params.addParam<bool>(
30  "standardize_params", true, "Standardize (center and scale) training parameters (x values)");
31  params.addParam<bool>(
32  "standardize_data", true, "Standardize (center and scale) training data (y values)");
33  params.addParam<unsigned int>("num_iters", 1000, "Tolerance value for Adam optimization");
34  params.addParam<unsigned int>("batch_size", 0, "The batch size for Adam optimization");
35  params.addParam<Real>("learning_rate", 0.001, "The learning rate for Adam optimization");
36  params.addParam<unsigned int>(
37  "show_every_nth_iteration",
38  0,
39  "Switch to show Adam optimization loss values at every nth step. If 0, nothing is showed.");
40  params.addParam<std::vector<std::string>>(
41  "tune_parameters", {}, "Select hyperparameters to be tuned");
42  params.addParam<std::vector<Real>>("tuning_min", {}, "Minimum allowable tuning value");
43  params.addParam<std::vector<Real>>("tuning_max", {}, "Maximum allowable tuning value");
44  return params;
45 }
46 
48  : SurrogateTrainerBase(parameters),
49  CovarianceInterface(parameters),
51  _gp(declareModelData<StochasticTools::GaussianProcess>("_gp")),
52  _training_params(declareModelData<RealEigenMatrix>("_training_params")),
53  _standardize_params(getParam<bool>("standardize_params")),
54  _standardize_data(getParam<bool>("standardize_data")),
55  _optimization_opts(StochasticTools::GaussianProcess::GPOptimizerOptions(
56  getParam<unsigned int>("show_every_nth_iteration"),
57  getParam<unsigned int>("num_iters"),
58  getParam<unsigned int>("batch_size"),
59  getParam<Real>("learning_rate")))
60 {
61  _gp.initialize(getCovarianceFunctionByName(getParam<UserObjectName>("covariance_function")),
62  getParam<std::vector<std::string>>("tune_parameters"),
63  getParam<std::vector<Real>>("tuning_min"),
64  getParam<std::vector<Real>>("tuning_max"));
65 }
66 
67 void
68 ActiveLearningGaussianProcess::reTrain(const std::vector<std::vector<Real>> & inputs,
69  const std::vector<Real> & outputs) const
70 {
71 
72  // Addtional error check for each re-train call of the GP surrogate
73  if (inputs.size() != outputs.size())
74  mooseError("Number of inputs (",
75  inputs.size(),
76  ") does not match number of outputs (",
77  outputs.size(),
78  ").");
79  if (inputs.empty())
80  mooseError("There is no data for retraining.");
81 
82  RealEigenMatrix training_data;
83  _training_params.setZero(outputs.size(), inputs[0].size());
84  training_data.setZero(outputs.size(), 1);
85 
86  for (unsigned int i = 0; i < outputs.size(); ++i)
87  {
88  training_data(i, 0) = outputs[i];
89  for (unsigned int j = 0; j < inputs[i].size(); ++j)
90  _training_params(i, j) = inputs[i][j];
91  }
92 
93  // Standardize (center and scale) training params
96  // if not standardizing data set mean=0, std=1 for use in surrogate
97  else
98  _gp.paramStandardizer().set(0, 1, inputs[0].size());
99 
100  // Standardize (center and scale) training data
101  if (_standardize_data)
102  _gp.standardizeData(training_data);
103  // if not standardizing data set mean=0, std=1 for use in surrogate
104  else
105  _gp.dataStandardizer().set(0, 1, inputs[0].size());
106 
107  // Setup the covariance
109 }
void setupCovarianceMatrix(const RealEigenMatrix &training_params, const RealEigenMatrix &training_data, const GPOptimizerOptions &opts)
Sets up the covariance matrix given data and optimization options.
void addParam(const std::string &name, const std::initializer_list< typename T::value_type > &value, const std::string &doc_string)
RealEigenMatrix & _training_params
Parameters (x) used for training, along with statistics.
virtual void reTrain(const std::vector< std::vector< Real >> &inputs, const std::vector< Real > &outputs) const final
void standardizeData(RealEigenMatrix &data, bool keep_moments=false)
Standardizes the vector of responses (y values).
void addRequiredParam(const std::string &name, const std::string &doc_string)
static InputParameters validParams()
Enum for batch type in stochastic tools MultiApp.
StochasticTools::Standardizer & dataStandardizer()
registerMooseObject("StochasticToolsApp", ActiveLearningGaussianProcess)
StochasticTools::Standardizer & paramStandardizer()
Get non-constant reference to the contained structures (if they need to be modified from the outside) ...
ActiveLearningGaussianProcess(const InputParameters &parameters)
const T & getParam(const std::string &name) const
Eigen::Matrix< Real, Eigen::Dynamic, Eigen::Dynamic > RealEigenMatrix
void initialize(CovarianceFunctionBase *covariance_function, const std::vector< std::string > &params_to_tune, const std::vector< Real > &min=std::vector< Real >(), const std::vector< Real > &max=std::vector< Real >())
Initializes the most important structures in the Gaussian Process: the covariance function and a tuni...
bool _standardize_data
Switch for training data(y) standardization.
DIE A HORRIBLE DEATH HERE typedef LIBMESH_DEFAULT_SCALAR_TYPE Real
Interface for objects that need to use samplers.
void set(const Real &n)
Methods for setting mean and standard deviation directly Sets mean=0, std=1 for n variables...
Definition: Standardizer.C:16
void mooseError(Args &&... args) const
void addClassDescription(const std::string &doc_string)
static const std::complex< double > j(0, 1)
Complex number "j" (also known as "i")
StochasticTools::GaussianProcess & _gp
The GP handler.
const StochasticTools::GaussianProcess::GPOptimizerOptions _optimization_opts
Struct holding parameters necessary for parameter tuning.
This is the base trainer class whose main functionality is the API for declaring model data...
bool _standardize_params
Switch for training param (x) standardization.
void ErrorVector unsigned int
void standardizeParameters(RealEigenMatrix &parameters, bool keep_moments=false)
Standardizes the vector of input parameters (x values).
CovarianceFunctionBase * getCovarianceFunctionByName(const UserObjectName &name) const
Lookup a CovarianceFunction object by name and return pointer.