flexible_relu.hpp
Go to the documentation of this file.
1 
16 #ifndef MLPACK_METHODS_ANN_LAYER_FLEXIBLERELU_HPP
17 #define MLPACK_METHODS_ANN_LAYER_FLEXIBLERELU_HPP
18 
19 #include <mlpack/prereqs.hpp>
20 
21 namespace mlpack {
22 namespace ann {
23 
55 template <
56  typename InputDataType = arma::mat,
57  typename OutputDataType = arma::mat
58 >
60 {
61  public:
72  FlexibleReLU(const double alpha = 0);
73 
77  void Reset();
78 
86  template<typename InputType, typename OutputType>
87  void Forward(const InputType&& input, OutputType&& output);
88 
98  template<typename DataType>
99  void Backward(const DataType&& input, DataType&& gy, DataType&& g);
100 
108  template<typename eT>
109  void Gradient(const arma::Mat<eT>&& input,
110  arma::Mat<eT>&& error,
111  arma::Mat<eT>&& gradient);
112 
114  OutputDataType const& Parameters() const { return alpha; }
116  OutputDataType& Parameters() { return alpha; }
117 
119  InputDataType const& InputParameter() const { return inputParameter; }
121  InputDataType& InputParameter() { return inputParameter; }
122 
124  OutputDataType const& OutputParameter() const { return outputParameter; }
126  OutputDataType& OutputParameter() { return outputParameter; }
127 
129  OutputDataType const& Delta() const { return delta; }
131  OutputDataType& Delta() { return delta;}
132 
134  OutputDataType const& Gradient() const { return gradient; }
136  OutputDataType& Gradient() { return gradient; }
137 
139  double const& Alpha() const { return alpha; }
141  double& Alpha() { return alpha; }
142 
146  template<typename Archive>
147  void serialize(Archive& ar, const unsigned int /* version*/);
148 
149  private:
151  OutputDataType delta;
152 
154  InputDataType inputParameter;
155 
157  OutputDataType outputParameter;
158 
160  OutputDataType alpha;
161 
163  OutputDataType gradient;
164 
166  double userAlpha;
167 }; // class FlexibleReLU
168 
169 } // namespace ann
170 } // namespace mlpack
171 
172 // Include implementation
173 #include "flexible_relu_impl.hpp"
174 
175 #endif
FlexibleReLU(const double alpha=0)
Create the FlexibleReLU object using the specified parameters.
void Backward(const DataType &&input, DataType &&gy, DataType &&g)
Ordinary feed backward pass of a neural network, calculating the function f(x) by propagating x backwards through f, using the results from the feed forward pass.
.hpp
Definition: add_to_po.hpp:21
The FlexibleReLU activation function, defined by.
void serialize(Archive &ar, const unsigned int)
Serialize the layer.
OutputDataType const & Delta() const
Get the delta.
The core includes that mlpack expects; standard C++ includes and Armadillo.
double & Alpha()
Modify the parameter controlling the range of the relu function.
OutputDataType & Gradient()
Modify the gradient.
OutputDataType const & Gradient() const
Get the gradient.
double const & Alpha() const
Get the parameter controlling the range of the relu function.
OutputDataType const & Parameters() const
Get the parameters.
void Reset()
Reset the layer parameter.
InputDataType const & InputParameter() const
Get the input parameter.
OutputDataType & OutputParameter()
Modify the output parameter.
OutputDataType const & OutputParameter() const
Get the output parameter.
OutputDataType & Parameters()
Modify the parameters.
void Forward(const InputType &&input, OutputType &&output)
Ordinary feed forward pass of a neural network, evaluating the function f(x) by propagating the activity forward through f.
InputDataType & InputParameter()
Modify the input parameter.
OutputDataType & Delta()
Modify the delta.