#ifndef MLPACK_METHODS_ANN_LAYER_PReLU_HPP
#define MLPACK_METHODS_ANN_LAYER_PReLU_HPP

// The core includes that mlpack expects; standard C++ includes and Armadillo.
#include <mlpack/prereqs.hpp>

namespace mlpack {
namespace ann /** Artificial Neural Network. */ {

/**
 * The PReLU activation function, defined by (where alpha is trainable)
 *
 * @f{eqnarray*}{
 * f(x) &=& \max(x, alpha \cdot x) \\
 * f'(x) &=& \left\{
 *   \begin{array}{lr}
 *     1 & : x \ge 0 \\
 *     alpha & : x < 0
 *   \end{array}
 * \right.
 * @f}
 */
template<
    typename InputDataType = arma::mat,
    typename OutputDataType = arma::mat
>
class PReLU
{
 public:
  //! Create the PReLU object using the specified parameters.  The non-zero
  //! gradient alpha is trainable; the default value is 0.03.
  PReLU(const double userAlpha = 0.03);

  //! Ordinary feed forward pass of a neural network, evaluating the function
  //! f(x) by propagating the activity forward through f.
  template<typename InputType, typename OutputType>
  void Forward(const InputType&& input, OutputType&& output);

  //! Ordinary feed backward pass of a neural network, calculating the
  //! function f(x) by propagating x backwards through f, using the results
  //! from the feed forward pass.
  template<typename DataType>
  void Backward(const DataType&& input, DataType&& gy, DataType&& g);

  //! Calculate the gradient using the output delta and the input activation.
  template<typename eT>
  void Gradient(const arma::Mat<eT>&& input,
                arma::Mat<eT>&& error,
                arma::Mat<eT>&& gradient);

  //! Get the parameters.
  OutputDataType const& Parameters() const { return alpha; }
  //! Modify the parameters.
  OutputDataType& Parameters() { return alpha; }

  //! Get the output parameter.
  OutputDataType const& OutputParameter() const { return outputParameter; }
  //! Modify the output parameter.
  OutputDataType& OutputParameter() { return outputParameter; }

  //! Get the delta.
  OutputDataType const& Delta() const { return delta; }
  //! Modify the delta.
  OutputDataType& Delta() { return delta; }

  //! Get the gradient.
  OutputDataType const& Gradient() const { return gradient; }
  //! Modify the gradient.
  OutputDataType& Gradient() { return gradient; }

  //! Get the non-zero gradient.
  double const& Alpha() const { return alpha(0); }
  //! Modify the non-zero gradient.
  double& Alpha() { return alpha(0); }

  //! Serialize the layer.
  template<typename Archive>
  void serialize(Archive& ar, const unsigned int /* version */);

 private:
  //! Compute the parametric ReLU function for a single element.
  double Fn(const double x)
  {
    return std::max(x, alpha(0) * x);
  }

  //! Compute the parametric ReLU function for a dense matrix.
  template<typename eT>
  void Fn(const arma::Mat<eT>& x, arma::Mat<eT>& y)
  {
    y = x;
    arma::uvec negative = arma::find(x < 0);
    y(negative) = x(negative) * alpha(0);
  }

  //! Compute the first derivative of the parametric ReLU function.
  double Deriv(const double x)
  {
    return (x >= 0) ? 1 : alpha(0);
  }

  //! Compute the first derivatives for a matrix of input activations.
  template<typename InputType, typename OutputType>
  void Deriv(const InputType& x, OutputType& y)
  {
    y.set_size(arma::size(x));

    for (size_t i = 0; i < x.n_elem; i++)
      y(i) = Deriv(x(i));
  }

  //! Locally-stored delta object.
  OutputDataType delta;

  //! Locally-stored output parameter object.
  OutputDataType outputParameter;

  //! Trainable leakyness parameter (the non-zero gradient alpha).
  OutputDataType alpha;

  //! Locally-stored gradient object.
  OutputDataType gradient;
}; // class PReLU

} // namespace ann
} // namespace mlpack

// Include implementation.
#include "parametric_relu_impl.hpp"

#endif