13 #include <petscdmda.h> 15 #include "libmesh/petsc_vector.h" 16 #include "libmesh/petsc_matrix.h" 27 "Permit re-training Gaussian Process surrogate model for active learning.");
28 params.
addRequiredParam<UserObjectName>(
"covariance_function",
"Name of covariance function.");
30 "standardize_params",
true,
"Standardize (center and scale) training parameters (x values)");
32 "standardize_data",
true,
"Standardize (center and scale) training data (y values)");
33 params.
addParam<
unsigned int>(
"num_iters", 1000,
"Tolerance value for Adam optimization");
34 params.
addParam<
unsigned int>(
"batch_size", 0,
"The batch size for Adam optimization");
35 params.
addParam<
Real>(
"learning_rate", 0.001,
"The learning rate for Adam optimization");
37 "show_every_nth_iteration",
39 "Switch to show Adam optimization loss values at every nth step. If 0, nothing is showed.");
40 params.
addParam<std::vector<std::string>>(
41 "tune_parameters", {},
"Select hyperparameters to be tuned");
42 params.
addParam<std::vector<Real>>(
"tuning_min", {},
"Minimum allowable tuning value");
43 params.
addParam<std::vector<Real>>(
"tuning_max", {},
"Maximum allowable tuning value");
52 _training_params(declareModelData<
RealEigenMatrix>(
"_training_params")),
53 _standardize_params(getParam<bool>(
"standardize_params")),
54 _standardize_data(getParam<bool>(
"standardize_data")),
55 _optimization_opts(
StochasticTools::GaussianProcess::GPOptimizerOptions(
56 getParam<unsigned
int>(
"show_every_nth_iteration"),
57 getParam<unsigned
int>(
"num_iters"),
58 getParam<unsigned
int>(
"batch_size"),
59 getParam<
Real>(
"learning_rate")))
62 getParam<std::vector<std::string>>(
"tune_parameters"),
63 getParam<std::vector<Real>>(
"tuning_min"),
64 getParam<std::vector<Real>>(
"tuning_max"));
69 const std::vector<Real> & outputs)
const 73 if (inputs.size() != outputs.size())
76 ") does not match number of outputs (",
80 mooseError(
"There is no data for retraining.");
84 training_data.setZero(outputs.size(), 1);
86 for (
unsigned int i = 0; i < outputs.size(); ++i)
88 training_data(i, 0) = outputs[i];
89 for (
unsigned int j = 0;
j < inputs[i].size(); ++
j)
RealEigenMatrix & _training_params
Parameters (x) used for training, along with statistics.
virtual void reTrain(const std::vector< std::vector< Real >> &inputs, const std::vector< Real > &outputs) const final
static InputParameters validParams()
registerMooseObject("StochasticToolsApp", ActiveLearningGaussianProcess)
ActiveLearningGaussianProcess(const InputParameters ¶meters)
const T & getParam(const std::string &name) const
Eigen::Matrix< Real, Eigen::Dynamic, Eigen::Dynamic > RealEigenMatrix
bool _standardize_data
Switch for training data (y) standardization.
DIE A HORRIBLE DEATH HERE typedef LIBMESH_DEFAULT_SCALAR_TYPE Real
Interface for objects that need to use samplers.
void mooseError(Args &&... args) const
static const std::complex< double > j(0, 1)
Complex number "j" (also known as "i")
StochasticTools::GaussianProcess & _gp
The GP handler.
const StochasticTools::GaussianProcess::GPOptimizerOptions _optimization_opts
Struct holding parameters necessary for parameter tuning.
This is the base trainer class whose main functionality is the API for declaring model data...
static InputParameters validParams()
bool _standardize_params
Switch for training param (x) standardization.
void ErrorVector unsigned int
CovarianceFunctionBase * getCovarianceFunctionByName(const UserObjectName &name) const
Lookup a CovarianceFunction object by name and return pointer.