24 #ifndef MLPACK_METHODS_ANN_LAYER_ELU_HPP 25 #define MLPACK_METHODS_ANN_LAYER_ELU_HPP 106 typename InputDataType = arma::mat,
107 typename OutputDataType = arma::mat
127 ELU(
const double alpha);
/**
 * Ordinary feed forward pass of a neural network, evaluating the function
 * f(x) by propagating the activity forward through f.
 *
 * NOTE(review): the const rvalue-reference parameters mirror the legacy
 * layer interface used elsewhere in this class — confirm before changing.
 *
 * @param input Input data used for evaluating the specified function.
 * @param output Resulting output activation.
 */
template<typename InputType, typename OutputType>
void Forward(const InputType&& input, OutputType&& output);
/**
 * Ordinary feed backward pass of a neural network, calculating the function
 * f(x) by propagating x backwards through f, using the results from the
 * feed forward pass.
 *
 * @param input The propagated input activation.
 * @param gy The backpropagated error.
 * @param g The calculated gradient.
 */
template<typename DataType>
void Backward(const DataType&& input, DataType&& gy, DataType&& g);
157 OutputDataType
const&
Delta()
const {
return delta; }
159 OutputDataType&
Delta() {
return delta; }
162 double const&
Alpha()
const {
return alpha; }
167 double const&
Lambda()
const {
return lambda; }
/**
 * Serialize the layer.
 *
 * @param ar The archive to serialize to/from.
 */
template<typename Archive>
void serialize(Archive& ar, const unsigned int /* version */);
182 double Fn(
const double x)
186 return (x > 0) ? lambda * x : lambda * alpha * (std::exp(x) - 1);
198 template<
typename eT>
199 void Fn(
const arma::Mat<eT>& x, arma::Mat<eT>& y)
201 y.set_size(arma::size(x));
203 for (
size_t i = 0; i < x.n_elem; i++)
216 double Deriv(
const double x,
const double y)
218 return (x > 0) ? lambda : y + lambda * alpha;
228 template<
typename InputType,
typename OutputType>
229 void Deriv(
const InputType& x, OutputType& y)
231 derivative.set_size(arma::size(x));
233 for (
size_t i = 0; i < x.n_elem; i++)
235 derivative(i) = Deriv(x(i), y(i));
240 OutputDataType delta;
243 OutputDataType outputParameter;
246 arma::mat derivative;
#include "elu_impl.hpp"

double const & Alpha() const
Get the non-zero gradient parameter (alpha).
void Backward(const DataType &&input, DataType &&gy, DataType &&g)
Ordinary feed backward pass of a neural network, calculating the function f(x) by propagating x backwards through f, using the results from the feed forward pass.
OutputDataType & Delta()
Modify the delta.
The core includes that mlpack expects; standard C++ includes and Armadillo.
void serialize(Archive &ar, const unsigned int)
Serialize the layer.
void Forward(const InputType &&input, OutputType &&output)
Ordinary feed forward pass of a neural network, evaluating the function f(x) by propagating the activity forward through f.
OutputDataType const & OutputParameter() const
Get the output parameter.
ELU()
Create the ELU object.
double & Alpha()
Modify the non zero gradient.
The ELU activation function, defined by f(x) = lambda * x for x > 0, and f(x) = lambda * alpha * (exp(x) - 1) otherwise.
OutputDataType & OutputParameter()
Modify the output parameter.
double const & Lambda() const
Get the lambda parameter.
OutputDataType const & Delta() const
Get the delta.