leaky_relu.hpp
Go to the documentation of this file.
1 
14 #ifndef MLPACK_METHODS_ANN_LAYER_LEAKYRELU_HPP
15 #define MLPACK_METHODS_ANN_LAYER_LEAKYRELU_HPP
16 
17 #include <mlpack/prereqs.hpp>
18 
19 namespace mlpack {
20 namespace ann {
21 
40 template <
41  typename InputDataType = arma::mat,
42  typename OutputDataType = arma::mat
43 >
44 class LeakyReLU
45 {
46  public:
54  LeakyReLU(const double alpha = 0.03);
55 
63  template<typename InputType, typename OutputType>
64  void Forward(const InputType&& input, OutputType&& output);
65 
75  template<typename DataType>
76  void Backward(const DataType&& input, DataType&& gy, DataType&& g);
77 
79  InputDataType const& InputParameter() const { return inputParameter; }
81  InputDataType& InputParameter() { return inputParameter; }
82 
84  OutputDataType const& OutputParameter() const { return outputParameter; }
86  OutputDataType& OutputParameter() { return outputParameter; }
87 
89  OutputDataType const& Delta() const { return delta; }
91  OutputDataType& Delta() { return delta; }
92 
94  double const& Alpha() const { return alpha; }
96  double& Alpha() { return alpha; }
97 
101  template<typename Archive>
102  void serialize(Archive& ar, const unsigned int /* version */);
103 
104  private:
111  double Fn(const double x)
112  {
113  return std::max(x, alpha * x);
114  }
115 
122  template<typename eT>
123  void Fn(const arma::Mat<eT>& x, arma::Mat<eT>& y)
124  {
125  y = arma::max(x, alpha * x);
126  }
127 
134  double Deriv(const double x)
135  {
136  return (x >= 0) ? 1 : alpha;
137  }
138 
146  template<typename InputType, typename OutputType>
147  void Deriv(const InputType& x, OutputType& y)
148  {
149  y = x;
150 
151  for (size_t i = 0; i < x.n_elem; i++)
152  {
153  y(i) = Deriv(x(i));
154  }
155  }
156 
158  OutputDataType delta;
159 
161  InputDataType inputParameter;
162 
164  OutputDataType outputParameter;
165 
167  double alpha;
168 }; // class LeakyReLU
169 
170 } // namespace ann
171 } // namespace mlpack
172 
173 // Include implementation.
174 #include "leaky_relu_impl.hpp"
175 
176 #endif
InputDataType & InputParameter()
Modify the input parameter.
Definition: leaky_relu.hpp:81
void serialize(Archive &ar, const unsigned int)
Serialize the layer.
LeakyReLU(const double alpha=0.03)
Create the LeakyReLU object using the specified parameters.
.hpp
Definition: add_to_po.hpp:21
void Backward(const DataType &&input, DataType &&gy, DataType &&g)
Ordinary feed backward pass of a neural network, calculating the function f(x) by propagating x backwards through f, using the results from the feed forward pass.
The core includes that mlpack expects; standard C++ includes and Armadillo.
InputDataType const & InputParameter() const
Get the input parameter.
Definition: leaky_relu.hpp:79
void Forward(const InputType &&input, OutputType &&output)
Ordinary feed forward pass of a neural network, evaluating the function f(x) by propagating the activity forward through f.
The LeakyReLU activation function, defined by f(x) = max(x, alpha * x).
Definition: leaky_relu.hpp:44
OutputDataType const & Delta() const
Get the delta.
Definition: leaky_relu.hpp:89
double const & Alpha() const
Get the non zero gradient.
Definition: leaky_relu.hpp:94
OutputDataType const & OutputParameter() const
Get the output parameter.
Definition: leaky_relu.hpp:84
double & Alpha()
Modify the non zero gradient.
Definition: leaky_relu.hpp:96
OutputDataType & OutputParameter()
Modify the output parameter.
Definition: leaky_relu.hpp:86
OutputDataType & Delta()
Modify the delta.
Definition: leaky_relu.hpp:91