elu.hpp
Go to the documentation of this file.
1 
24 #ifndef MLPACK_METHODS_ANN_LAYER_ELU_HPP
25 #define MLPACK_METHODS_ANN_LAYER_ELU_HPP
26 
27 #include <mlpack/prereqs.hpp>
28 
29 namespace mlpack {
30 namespace ann {
31 
101 template <
102  typename InputDataType = arma::mat,
103  typename OutputDataType = arma::mat
104 >
105 class ELU
106 {
107  public:
113  ELU();
114 
123  ELU(const double alpha);
124 
132  template<typename InputType, typename OutputType>
133  void Forward(const InputType&& input, OutputType&& output);
134 
144  template<typename DataType>
145  void Backward(const DataType&& input, DataType&& gy, DataType&& g);
146 
148  InputDataType const& InputParameter() const { return inputParameter; }
150  InputDataType& InputParameter() { return inputParameter; }
151 
153  OutputDataType const& OutputParameter() const { return outputParameter; }
155  OutputDataType& OutputParameter() { return outputParameter; }
156 
158  OutputDataType const& Delta() const { return delta; }
160  OutputDataType& Delta() { return delta; }
161 
163  double const& Alpha() const { return alpha; }
165  double& Alpha() { return alpha; }
166 
168  double const& Lambda() const { return lambda; }
169 
173  template<typename Archive>
174  void serialize(Archive& ar, const unsigned int /* version */);
175 
176  private:
183  double Fn(const double x)
184  {
185  if (x < DBL_MAX)
186  {
187  return (x > 0) ? lambda * x : lambda * alpha * (std::exp(x) - 1);
188  }
189 
190  return 1.0;
191  }
192 
199  template<typename eT>
200  void Fn(const arma::Mat<eT>& x, arma::Mat<eT>& y)
201  {
202  y.set_size(size(x));
203 
204  for (size_t i = 0; i < x.n_elem; i++)
205  {
206  y(i) = Fn(x(i));
207  }
208  }
209 
216  double Deriv(const double y)
217  {
218  return (y > 0) ? lambda : lambda * (y + alpha);
219  }
220 
227  template<typename InputType, typename OutputType>
228  void Deriv(const InputType& x, OutputType& y)
229  {
230  y = x;
231 
232  for (size_t i = 0; i < x.n_elem; i++)
233  {
234  y(i) = Deriv(x(i));
235  }
236  }
237 
239  OutputDataType delta;
240 
242  InputDataType inputParameter;
243 
245  OutputDataType outputParameter;
246 
249  double alpha;
250 
255  double lambda;
256 }; // class ELU
257 
// Template alias for SELU using the ELU class.
// NOTE(review): the `using` declaration for the SELU alias itself is not
// shown in this extract — confirm against the full header.
261 } // namespace ann
262 } // namespace mlpack
263 
264 // Include implementation.
265 #include "elu_impl.hpp"
266 
267 #endif
double const & Alpha() const
Get the non zero gradient.
Definition: elu.hpp:163
.hpp
Definition: add_to_po.hpp:21
void Backward(const DataType &&input, DataType &&gy, DataType &&g)
Ordinary feed backward pass of a neural network, calculating the function f(x) by propagating x backwards through f, using the results of the forward pass.
OutputDataType & Delta()
Modify the delta.
Definition: elu.hpp:160
The core includes that mlpack expects; standard C++ includes and Armadillo.
void serialize(Archive &ar, const unsigned int)
Serialize the layer.
void Forward(const InputType &&input, OutputType &&output)
Ordinary feed forward pass of a neural network, evaluating the function f(x) by propagating the activity forward through f.
InputDataType & InputParameter()
Modify the input parameter.
Definition: elu.hpp:150
InputDataType const & InputParameter() const
Get the input parameter.
Definition: elu.hpp:148
OutputDataType const & OutputParameter() const
Get the output parameter.
Definition: elu.hpp:153
ELU()
Create the ELU object.
double & Alpha()
Modify the non zero gradient.
Definition: elu.hpp:165
The ELU activation function, defined by f(x) = lambda * x for x &gt; 0 and lambda * alpha * (e^x - 1) otherwise.
Definition: elu.hpp:105
OutputDataType & OutputParameter()
Modify the output parameter.
Definition: elu.hpp:155
double const & Lambda() const
Get the lambda parameter.
Definition: elu.hpp:168
OutputDataType const & Delta() const
Get the delta.
Definition: elu.hpp:158