LCOV - code coverage report

Current view: top level - src/materials - NeuralNetFreeEnergy.C (source / functions)
Test: idaholab/magpie: 5710af
Date: 2025-07-21 23:34:39

             Hit    Total    Coverage
Lines:        17       28      60.7 %
Functions:     3        3     100.0 %

          Line data    Source code
       1             : /**********************************************************************/
       2             : /*                     DO NOT MODIFY THIS HEADER                      */
       3             : /* MAGPIE - Mesoscale Atomistic Glue Program for Integrated Execution */
       4             : /*                                                                    */
       5             : /*            Copyright 2017 Battelle Energy Alliance, LLC            */
       6             : /*                        ALL RIGHTS RESERVED                         */
       7             : /**********************************************************************/
       8             : 
       9             : #include "NeuralNetFreeEnergy.h"
      10             : 
      11             : registerADMooseObject("MagpieApp", NeuralNetFreeEnergy);
      12             : 
      13             : InputParameters
      14          38 : NeuralNetFreeEnergy::validParams()
      15             : {
      16          38 :   auto params = NeuralNetFreeEnergyBase::validParams();
      17          38 :   params.addClassDescription("Evaluates a fitted deep neural network to obtain a free energy and "
      18             :                              "its derivatives with a preset activation function.");
      19             : 
      20          76 :   MooseEnum activationFunctionEnum("SIGMOID SOFTSIGN TANH", "SIGMOID");
      21          76 :   params.template addParam<MooseEnum>(
      22             :       "activation_function", activationFunctionEnum, "Activation function applied in the hidden layers");
      23          38 :   return params;
      24          38 : }
      25             : 
      26          30 : NeuralNetFreeEnergy::NeuralNetFreeEnergy(const InputParameters & parameters)
      27             :   : NeuralNetFreeEnergyBase(parameters),
      28          30 :     _activation_function(
      29          30 :         getParam<MooseEnum>("activation_function").template getEnum<ActivationFunction>())
      30             : {
      31          30 : }
      32             : 
      33             : void
      34       12000 : NeuralNetFreeEnergy::applyLayerActivation()
      35             : {
      36       12000 :   switch (_activation_function)
      37             :   {
      38             :     case ActivationFunction::SIGMOID:
      39      264000 :       for (std::size_t j = 0; j < _z[_layer].size(); ++j)
      40             :       {
      41             :         const auto & z = _z[_layer](j);
      42             : 
      43      756000 :         const auto F = 1.0 / (1.0 + std::exp(-z));
      44      252000 :         _activation[_layer + 1](j) = F;
      45             : 
      46             :         // Note dF(z)/dz = F(z)*(1-F(z)), thus the expensive sigmoid only has to be computed once!
      47      504000 :         _d_activation[_layer + 1](j) = F * (1 - F);
      48             :       }
      49             :       return;
      50             : 
      51             :     case ActivationFunction::SOFTSIGN:
      52           0 :       for (std::size_t j = 0; j < _z[_layer].size(); ++j)
      53             :       {
      54             :         const auto & z = _z[_layer](j);
      55             : 
      56           0 :         const auto p = 1.0 + std::abs(z);
      57             :         const auto F = z / p;
      58           0 :         _activation[_layer + 1](j) = F;
      59             : 
      60           0 :         const auto dF = -std::abs(z) / (p * p) + 1.0 / p; // = (p - |z|) / p^2 = 1 / (1 + |z|)^2
      61           0 :         _d_activation[_layer + 1](j) = dF;
      62             :       }
      63             :       return;
      64             : 
      65             :     case ActivationFunction::TANH:
      66           0 :       for (std::size_t j = 0; j < _z[_layer].size(); ++j)
      67             :       {
      68             :         const auto & z = _z[_layer](j);
      69             : 
      70           0 :         const auto F = std::tanh(z);
      71           0 :         _activation[_layer + 1](j) = F;
      72             : 
      73           0 :         _d_activation[_layer + 1](j) = 1.0 - F * F; // d tanh(z)/dz = 1 - tanh^2(z)
      74             :       }
      75             :       return;
      76             : 
      77           0 :     default:
      78           0 :       paramError("activation_function", "Unknown activation function");
      79             :   }
      80             : }
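
Note that the coverage data above shows only the SIGMOID branch is exercised by the test suite; the SOFTSIGN and TANH loops have zero hits. Each branch computes the activation F together with its derivative dF/dz in closed form, so the transcendental function is evaluated only once per neuron. The following standalone sketch (not part of Magpie; all names are illustrative) reproduces the three formulas and checks each analytic derivative against a central finite difference:

#include <cmath>
#include <cstdio>
#include <initializer_list>

// Activation value and analytic derivative, mirroring the three switch branches.
struct FD
{
  double f;
  double df;
};

FD sigmoid(double z)
{
  const double F = 1.0 / (1.0 + std::exp(-z));
  return {F, F * (1.0 - F)}; // dF/dz = F(z) * (1 - F(z))
}

FD softsign(double z)
{
  const double p = 1.0 + std::abs(z);
  return {z / p, 1.0 / (p * p)}; // dF/dz = 1 / (1 + |z|)^2
}

FD tanh_act(double z)
{
  const double F = std::tanh(z);
  return {F, 1.0 - F * F}; // dF/dz = 1 - tanh^2(z)
}

int main()
{
  const double h = 1e-6;
  for (double z : {-2.0, -0.5, 0.0, 1.0, 3.0})
  {
    const FD s = sigmoid(z), ss = softsign(z), t = tanh_act(z);
    // Central finite differences for comparison against the analytic values.
    const double ds = (sigmoid(z + h).f - sigmoid(z - h).f) / (2.0 * h);
    const double dss = (softsign(z + h).f - softsign(z - h).f) / (2.0 * h);
    const double dt = (tanh_act(z + h).f - tanh_act(z - h).f) / (2.0 * h);
    std::printf("z=%+5.2f sigmoid %+.6f/%+.6f softsign %+.6f/%+.6f tanh %+.6f/%+.6f\n",
                z, s.df, ds, ss.df, dss, t.df, dt);
  }
  return 0;
}

Reusing F (or p) in the derivative avoids a second call to std::exp or std::tanh, which is exactly the optimization the sigmoid comment in the listing points out.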

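For context, the activation_function parameter above uses MOOSE's MooseEnum: the input-file string is validated against the fixed option list in validParams(), converted once to the strongly typed ActivationFunction enum in the constructor via getEnum<>(), and then dispatched with a plain switch in the per-evaluation hot path. Below is a minimal, MOOSE-free sketch of the same validate-once/dispatch-by-enum pattern (all names here are illustrative, not MOOSE API):

#include <iostream>
#include <map>
#include <stdexcept>
#include <string>

enum class ActivationFunction { SIGMOID, SOFTSIGN, TANH };

// Validate and convert the user-supplied option once, outside the hot loop
// (the role MooseEnum::getEnum<T>() plays in the constructor above).
ActivationFunction parseActivation(const std::string & name)
{
  static const std::map<std::string, ActivationFunction> options = {
      {"SIGMOID", ActivationFunction::SIGMOID},
      {"SOFTSIGN", ActivationFunction::SOFTSIGN},
      {"TANH", ActivationFunction::TANH}};
  const auto it = options.find(name);
  if (it == options.end())
    throw std::invalid_argument("Unknown activation function: " + name);
  return it->second;
}

int main()
{
  const ActivationFunction act = parseActivation("SIGMOID");
  switch (act) // cheap integer dispatch in the hot path
  {
    case ActivationFunction::SIGMOID:
      std::cout << "sigmoid selected\n";
      break;
    case ActivationFunction::SOFTSIGN:
      std::cout << "softsign selected\n";
      break;
    case ActivationFunction::TANH:
      std::cout << "tanh selected\n";
      break;
  }
  return 0;
}
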
Generated by: LCOV version 1.14