alpha_dropout.hpp
Go to the documentation of this file.
16 #ifndef MLPACK_METHODS_ANN_LAYER_ALPHA_DROPOUT_HPP
17 #define MLPACK_METHODS_ANN_LAYER_ALPHA_DROPOUT_HPP
18 
19 #include <mlpack/prereqs.hpp>
20 
21 namespace mlpack {
22 namespace ann {
23 
48 template <typename InputDataType = arma::mat,
49  typename OutputDataType = arma::mat>
51 {
52  public:
59  AlphaDropout(const double ratio = 0.5,
60  const double alphaDash = -alpha * lambda);
61 
68  template<typename eT>
69  void Forward(const arma::Mat<eT>&& input, arma::Mat<eT>&& output);
70 
78  template<typename eT>
79  void Backward(const arma::Mat<eT>&& /* input */,
80  arma::Mat<eT>&& gy,
81  arma::Mat<eT>&& g);
82 
84  InputDataType const& InputParameter() const { return inputParameter; }
86  InputDataType& InputParameter() { return inputParameter; }
87 
89  OutputDataType const& OutputParameter() const { return outputParameter; }
91  OutputDataType& OutputParameter() { return outputParameter; }
92 
94  OutputDataType const& Delta() const { return delta; }
96  OutputDataType& Delta() { return delta; }
97 
99  bool Deterministic() const { return deterministic; }
101  bool& Deterministic() { return deterministic; }
102 
104  double Ratio() const { return ratio; }
105 
107  double A() const { return a; }
108 
110  double B() const { return b; }
111 
113  double AlphaDash() const {return alphaDash; }
114 
116  OutputDataType const& Mask() const {return mask;}
117 
120  void Ratio(const double r)
121  {
122  ratio = r;
123  a = pow((1 - ratio) * (1 + ratio * pow(alphaDash, 2)), -0.5);
124  b = -a * alphaDash * ratio;
125  }
126 
130  template<typename Archive>
131  void serialize(Archive& ar, const unsigned int /* version */);
132 
133  private:
135  OutputDataType delta;
136 
138  InputDataType inputParameter;
139 
141  OutputDataType outputParameter;
142 
144  OutputDataType mask;
145 
147  double ratio;
148 
150  double alphaDash;
151 
153  bool deterministic;
154 
156  static constexpr double alpha = 1.6732632423543772848170429916717;
157 
159  static constexpr double lambda = 1.0507009873554804934193349852946;
160 
162  double a;
163 
165  double b;
166 }; // class AlphaDropout
167 
168 } // namespace ann
169 } // namespace mlpack
170 
171 // Include implementation.
172 #include "alpha_dropout_impl.hpp"
173 
174 #endif
bool & Deterministic()
Modify the value of the deterministic parameter.
InputDataType const & InputParameter() const
Get the input parameter.
.hpp
Definition: add_to_po.hpp:21
double A() const
Value to be multiplied with x for affine transformation.
AlphaDropout(const double ratio=0.5, const double alphaDash=-alpha * lambda)
Create the AlphaDropout object using the specified ratio.
double Ratio() const
The probability of setting a value to alphaDash.
The core includes that mlpack expects; standard C++ includes and Armadillo.
double AlphaDash() const
Value of alphaDash.
void Forward(const arma::Mat< eT > &&input, arma::Mat< eT > &&output)
Ordinary feed forward pass of the alpha_dropout layer.
OutputDataType & OutputParameter()
Modify the output parameter.
OutputDataType const & Mask() const
Get the mask.
double B() const
Value to be added to a*x for affine transformation.
OutputDataType const & Delta() const
Get the delta.
InputDataType & InputParameter()
Modify the input parameter.
void Ratio(const double r)
Modify the probability of setting a value to alphaDash.
The alpha-dropout layer is a regularizer that, with probability 'ratio', randomly sets input values to alphaDash.
void serialize(Archive &ar, const unsigned int)
Serialize the layer.
bool Deterministic() const
The value of the deterministic parameter.
OutputDataType const & OutputParameter() const
Get the output parameter.
OutputDataType & Delta()
Modify the delta.
void Backward(const arma::Mat< eT > &&, arma::Mat< eT > &&gy, arma::Mat< eT > &&g)
Ordinary feed backward pass of the alpha_dropout layer.