elu.hpp

#ifndef MLPACK_METHODS_ANN_LAYER_ELU_HPP
#define MLPACK_METHODS_ANN_LAYER_ELU_HPP

#include <mlpack/prereqs.hpp>

namespace mlpack {
namespace ann {
template <
    typename InputDataType = arma::mat,
    typename OutputDataType = arma::mat
>
class ELU
{
 public:
  //! Create the ELU object; the default constructor is meant for SELU and
  //! fixes alpha and lambda to the self-normalizing values (see below).
  ELU();

  //! Create the ELU object using the specified non-zero gradient parameter
  //! alpha; this constructor sets lambda = 1, giving the standard ELU.
  ELU(const double alpha);

  //! Ordinary feed forward pass of a neural network, evaluating the
  //! function f(x) by propagating the activity forward through f.
  template<typename InputType, typename OutputType>
  void Forward(const InputType&& input, OutputType&& output);

  //! Ordinary feed backward pass of a neural network, calculating the
  //! function f(x) by propagating x backwards through f, using the results
  //! from the feed forward pass.
  template<typename DataType>
  void Backward(const DataType&& input, DataType&& gy, DataType&& g);

  //! Get the output parameter.
  OutputDataType const& OutputParameter() const { return outputParameter; }
  //! Modify the output parameter.
  OutputDataType& OutputParameter() { return outputParameter; }

  //! Get the delta.
  OutputDataType const& Delta() const { return delta; }
  //! Modify the delta.
  OutputDataType& Delta() { return delta; }

  //! Get the non-zero gradient parameter.
  double const& Alpha() const { return alpha; }
  //! Modify the non-zero gradient parameter.
  double& Alpha() { return alpha; }

  //! Get the lambda parameter.
  double const& Lambda() const { return lambda; }

  //! Serialize the layer.
  template<typename Archive>
  void serialize(Archive& ar, const unsigned int /* version */);

 private:
  //! Compute the ELU activation f(x) of a single element; inputs at the
  //! DBL_MAX overflow guard map to 1.
  double Fn(const double x)
  {
    if (x < DBL_MAX)
    {
      return (x > 0) ? lambda * x : lambda * alpha * (std::exp(x) - 1);
    }

    return 1.0;
  }

  //! Compute the ELU activation of all elements of the input matrix x,
  //! storing the result in y.
  template<typename eT>
  void Fn(const arma::Mat<eT>& x, arma::Mat<eT>& y)
  {
    y.set_size(arma::size(x));

    for (size_t i = 0; i < x.n_elem; i++)
    {
      y(i) = Fn(x(i));
    }
  }

  //! Compute the first derivative of the ELU activation for a single
  //! element, given x and the corresponding activation y = f(x). For
  //! x <= 0, f'(x) = lambda * alpha * e^x = y + lambda * alpha, so the
  //! forward output is reused instead of recomputing the exponential.
  double Deriv(const double x, const double y)
  {
    return (x > 0) ? lambda : y + lambda * alpha;
  }

  //! Compute the first derivative for all elements of the input matrix x,
  //! given the corresponding activations y, storing the result in the
  //! derivative member.
  template<typename InputType, typename OutputType>
  void Deriv(const InputType& x, OutputType& y)
  {
    derivative.set_size(arma::size(x));

    for (size_t i = 0; i < x.n_elem; i++)
    {
      derivative(i) = Deriv(x(i), y(i));
    }
  }

  //! Locally-stored delta object.
  OutputDataType delta;

  //! Locally-stored output parameter object.
  OutputDataType outputParameter;

  //! Locally-stored first derivative of the activation function.
  arma::mat derivative;

  //! ELU hyperparameter (alpha > 0): scale of the negative part of the
  //! activation.
  double alpha;

  //! Lambda parameter; fixed by the constructor (1 for the standard ELU).
  double lambda;

  //! Locally-stored deterministic parameter.
  bool deterministic;
}; // class ELU

// Template alias for SELU using the ELU class.
template<typename InputDataType = arma::mat,
         typename OutputDataType = arma::mat>
using SELU = ELU<InputDataType, OutputDataType>;
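
// SELU behavior comes from the default ELU constructor, which is expected
// (per the Klambauer et al. "Self-Normalizing Neural Networks" definition)
// to set alpha ~= 1.6733 and lambda ~= 1.0507; the constructor bodies live
// in elu_impl.hpp.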

} // namespace ann
} // namespace mlpack

// Include implementation.
#include "elu_impl.hpp"

#endif
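
Below is a minimal usage sketch, not part of the header: it exercises the
declarations above under the mlpack 3.x layer conventions (std::move-based
Forward/Backward calls, with the forward activation passed as Backward's
first argument). The definitions live in elu_impl.hpp, so treat those
calling details as assumptions.

#include <mlpack/core.hpp>
#include <mlpack/methods/ann/layer/elu.hpp>

using namespace mlpack::ann;

int main()
{
  // One 5-dimensional data point; mlpack layers use one column per point.
  arma::mat input("-2.0; -0.5; 0.0; 0.5; 2.0");
  arma::mat output, g;

  // Standard ELU: alpha = 1.0 (this constructor fixes lambda = 1).
  ELU<> elu(1.0);

  // Forward pass: output(i) = f(input(i)).
  elu.Forward(std::move(input), std::move(output));

  // Backward pass with an all-ones upstream error, so g holds the
  // elementwise derivative f'(input).
  arma::mat gy = arma::ones<arma::mat>(arma::size(input));
  elu.Backward(std::move(output), std::move(gy), std::move(g));

  output.print("ELU forward:");
  g.print("ELU derivative:");

  return 0;
}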