LibtorchArtificialNeuralNet.h
//* This file is part of the MOOSE framework
//* https://mooseframework.inl.gov
//*
//* All rights reserved, see COPYRIGHT for full restrictions
//* https://github.com/idaholab/moose/blob/master/COPYRIGHT
//*
//* Licensed under LGPL 2.1, please see LICENSE for details
//* https://www.gnu.org/licenses/lgpl-2.1.html

#ifdef MOOSE_LIBTORCH_ENABLED

#pragma once

#include <torch/torch.h>
#include <torch/script.h>
#include "LibtorchNeuralNetBase.h"
#include "MooseError.h"
#include "DataIO.h"
#include "MultiMooseEnum.h"
#include "nlohmann/json.h"

namespace Moose
{
// A class that describes a simple feed-forward neural net.
class LibtorchArtificialNeuralNet : public torch::nn::Module, public LibtorchNeuralNetBase
{
public:
  /// Construct using input parameters
  LibtorchArtificialNeuralNet(const std::string name,
                              const unsigned int num_inputs,
                              const unsigned int num_outputs,
                              const std::vector<unsigned int> & num_neurons_per_layer,
                              const std::vector<std::string> & activation_function = {"relu"},
                              const torch::DeviceType device_type = torch::kCPU,
                              const torch::ScalarType scalar_type = torch::kDouble);

  /// Add layers to the neural network
  virtual void addLayer(const std::string & layer_name,
                        const std::unordered_map<std::string, unsigned int> & parameters);

  /// Overriding the forward substitution function for the neural network,
  /// unfortunately this cannot be const
  virtual torch::Tensor forward(const torch::Tensor & x) override;

  /// Return the name of the neural network
  const std::string & name() const { return _name; }
  /// Return the number of neurons on the input layer
  unsigned int numInputs() const { return _num_inputs; }
  /// Return the number of neurons on the output layer
  unsigned int numOutputs() const { return _num_outputs; }
  /// Return the number of hidden layers
  unsigned int numHiddenLayers() const { return _num_neurons_per_layer.size(); }
  /// Return the hidden layer architecture
  const std::vector<unsigned int> & numNeuronsPerLayer() const { return _num_neurons_per_layer; }
  /// Return the multi enum containing the activation functions
  const MultiMooseEnum & activationFunctions() const { return _activation_function; }
  /// Return the device which is used by this neural network
  torch::DeviceType deviceType() const { return _device_type; }
  /// Return the data type which is used by this neural network
  torch::ScalarType dataType() const { return _data_type; }
  /// Construct the neural network
  void constructNeuralNetwork();

  /// Store the network architecture in a json file (for debugging, visualization)
  void store(nlohmann::json & json) const;

protected:
  /// Name of the neural network
  const std::string _name;
  /// Submodules that hold linear operations and the corresponding weights and biases (y = W * x + b)
  std::vector<torch::nn::Linear> _weights;
  // Number of neurons on the input layer
  const unsigned int _num_inputs;
  /// Number of neurons on the output layer
  const unsigned int _num_outputs;
  /// Hidden layer architecture
  const std::vector<unsigned int> _num_neurons_per_layer;
  /// Activation functions (either one for all hidden layers or one for every layer separately)
  MultiMooseEnum _activation_function;
  /// The device type used for this neural network
  const torch::DeviceType _device_type;
  /// The data type used in this neural network
  const torch::ScalarType _data_type;
};

void to_json(nlohmann::json & json, const Moose::LibtorchArtificialNeuralNet * const & network);

}

template <>
void dataStore<Moose::LibtorchArtificialNeuralNet>(
    std::ostream & stream,
    std::shared_ptr<Moose::LibtorchArtificialNeuralNet> & nn,
    void * context);

template <>
void dataLoad<Moose::LibtorchArtificialNeuralNet>(
    std::istream & stream,
    std::shared_ptr<Moose::LibtorchArtificialNeuralNet> & nn,
    void * context);

// These specializations are needed because the reporter used to output the neural net
// parameters to JSON requires a dataStore/dataLoad. However, these functions will be empty
// because we are only interested in the JSON output and do not want to serialize everything.
template <>
void dataStore<Moose::LibtorchArtificialNeuralNet const>(
    std::ostream & stream, Moose::LibtorchArtificialNeuralNet const *& nn, void * context);

template <>
void dataLoad<Moose::LibtorchArtificialNeuralNet const>(
    std::istream & stream, Moose::LibtorchArtificialNeuralNet const *& nn, void * context);

#endif
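
For context, here is a minimal usage sketch based only on the declarations above: it builds a small network, runs one forward pass, and dumps the architecture to JSON. The network name, the architecture (3 inputs, two hidden layers of 16 neurons, 1 output), and the function name sketchUsage are illustrative, not part of this header, and the sketch assumes an application built with MOOSE_LIBTORCH_ENABLED and linked against libtorch.

#include "LibtorchArtificialNeuralNet.h"

// Illustrative sketch: 3 inputs, two hidden layers of 16 neurons, 1 output,
// using the defaults declared above (ReLU activation, CPU, double precision).
void sketchUsage()
{
  Moose::LibtorchArtificialNeuralNet nn(
      "example_net", /*num_inputs=*/3, /*num_outputs=*/1, /*num_neurons_per_layer=*/{16, 16});

  // Evaluate a batch of 8 samples; torch::nn::Linear acts on the last dimension,
  // so the input tensor is laid out as (batch_size, num_inputs) in double precision.
  const torch::Tensor x = torch::rand({8, 3}, torch::kDouble);
  const torch::Tensor y = nn.forward(x); // expected shape: (8, 1)

  // Store the network architecture in JSON (for debugging, visualization).
  nlohmann::json net_json;
  nn.store(net_json);
}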