ffn.hpp
Go to the documentation of this file.
1 
13 #ifndef MLPACK_METHODS_ANN_FFN_HPP
14 #define MLPACK_METHODS_ANN_FFN_HPP
15 
16 #include <mlpack/prereqs.hpp>
17 
25 #include "visitor/copy_visitor.hpp"
26 #include "visitor/loss_visitor.hpp"
27 
29 
33 #include <ensmallen.hpp>
34 
35 namespace mlpack {
36 namespace ann {
37 
46 template<
47  typename OutputLayerType = NegativeLogLikelihood<>,
48  typename InitializationRuleType = RandomInitialization,
49  typename... CustomLayers
50 >
51 class FFN
52 {
53  public:
56 
70  FFN(OutputLayerType outputLayer = OutputLayerType(),
71  InitializationRuleType initializeRule = InitializationRuleType());
72 
74  FFN(const FFN&);
75 
77  FFN(FFN&&);
78 
81 
83  ~FFN();
84 
102  template<typename OptimizerType>
103  double Train(arma::mat predictors,
104  arma::mat responses,
105  OptimizerType& optimizer);
106 
124  template<typename OptimizerType = ens::RMSProp>
125  double Train(arma::mat predictors, arma::mat responses);
126 
138  void Predict(arma::mat predictors, arma::mat& results);
139 
147  double Evaluate(arma::mat predictors, arma::mat responses);
148 
157  double Evaluate(const arma::mat& parameters);
158 
172  double Evaluate(const arma::mat& parameters,
173  const size_t begin,
174  const size_t batchSize,
175  const bool deterministic);
176 
189  double Evaluate(const arma::mat& parameters,
190  const size_t begin,
191  const size_t batchSize);
192 
201  template<typename GradType>
202  double EvaluateWithGradient(const arma::mat& parameters, GradType& gradient);
203 
216  template<typename GradType>
217  double EvaluateWithGradient(const arma::mat& parameters,
218  const size_t begin,
219  GradType& gradient,
220  const size_t batchSize);
221 
234  void Gradient(const arma::mat& parameters,
235  const size_t begin,
236  arma::mat& gradient,
237  const size_t batchSize);
238 
243  void Shuffle();
244 
245  /*
246  * Add a new module to the model.
247  *
248  * @param args The layer parameter.
249  */
250  template <class LayerType, class... Args>
251  void Add(Args... args) { network.push_back(new LayerType(args...)); }
252 
253  /*
254  * Add a new module to the model.
255  *
256  * @param layer The Layer to be added to the model.
257  */
258  void Add(LayerTypes<CustomLayers...> layer) { network.push_back(layer); }
259 
261  size_t NumFunctions() const { return numFunctions; }
262 
264  const arma::mat& Parameters() const { return parameter; }
266  arma::mat& Parameters() { return parameter; }
267 
269  const arma::mat& Responses() const { return responses; }
271  arma::mat& Responses() { return responses; }
272 
274  const arma::mat& Predictors() const { return predictors; }
276  arma::mat& Predictors() { return predictors; }
277 
281  void ResetParameters();
282 
284  template<typename Archive>
285  void serialize(Archive& ar, const unsigned int /* version */);
286 
297  void Forward(arma::mat inputs, arma::mat& results);
298 
310  void Forward(arma::mat inputs,
311  arma::mat& results,
312  const size_t begin,
313  const size_t end);
314 
326  double Backward(arma::mat targets, arma::mat& gradients);
327 
328  private:
329  // Helper functions.
336  void Forward(arma::mat&& input);
337 
345  void ResetData(arma::mat predictors, arma::mat responses);
346 
351  void Backward();
352 
357  void Gradient(arma::mat&& input);
358 
363  void ResetDeterministic();
364 
368  void ResetGradients(arma::mat& gradient);
369 
375  void Swap(FFN& network);
376 
378  OutputLayerType outputLayer;
379 
382  InitializationRuleType initializeRule;
383 
385  size_t width;
386 
388  size_t height;
389 
391  bool reset;
392 
394  std::vector<LayerTypes<CustomLayers...> > network;
395 
397  arma::mat predictors;
398 
400  arma::mat responses;
401 
403  arma::mat parameter;
404 
406  size_t numFunctions;
407 
409  arma::mat error;
410 
412  arma::mat currentInput;
413 
415  DeltaVisitor deltaVisitor;
416 
418  OutputParameterVisitor outputParameterVisitor;
419 
421  WeightSizeVisitor weightSizeVisitor;
422 
424  OutputWidthVisitor outputWidthVisitor;
425 
427  OutputHeightVisitor outputHeightVisitor;
428 
430  LossVisitor lossVisitor;
431 
433  ResetVisitor resetVisitor;
434 
436  DeleteVisitor deleteVisitor;
437 
439  bool deterministic;
440 
442  arma::mat delta;
443 
445  arma::mat inputParameter;
446 
448  arma::mat outputParameter;
449 
451  arma::mat gradient;
452 
454  CopyVisitor<CustomLayers...> copyVisitor;
455 
456  // The GAN class should have access to internal members.
457  template<
458  typename Model,
459  typename InitializerType,
460  typename NoiseType,
461  typename PolicyType
462  >
463  friend class GAN;
464 }; // class FFN
465 
466 } // namespace ann
467 } // namespace mlpack
468 
471 namespace boost {
472 namespace serialization {
473 
474 template<typename OutputLayerType,
475  typename InitializationRuleType,
476  typename... CustomLayer>
477 struct version<
478  mlpack::ann::FFN<OutputLayerType, InitializationRuleType, CustomLayer...>>
479 {
480  BOOST_STATIC_CONSTANT(int, value = 1);
481 };
482 
483 } // namespace serialization
484 } // namespace boost
485 
486 // Include implementation.
487 #include "ffn_impl.hpp"
488 
489 #endif
DeleteVisitor executes the destructor of the instantiated object.
void Gradient(const arma::mat &parameters, const size_t begin, arma::mat &gradient, const size_t batchSize)
Evaluate the gradient of the feedforward network with the given parameters, and with respect to only ...
OutputHeightVisitor exposes the OutputHeight() method of the given module.
arma::mat & Responses()
Modify the matrix of responses to the input data points.
Definition: ffn.hpp:271
void serialize(Archive &ar, const unsigned int)
Serialize the model.
size_t NumFunctions() const
Return the number of separable functions (the number of predictor points).
Definition: ffn.hpp:261
void Predict(arma::mat predictors, arma::mat &results)
Predict the responses to a given set of predictors.
boost::variant< Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, AtrousConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BaseLayer< SoftplusFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, Concatenate< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, TransposedConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, AlphaDropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, FlexibleReLU< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LayerNorm< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, CReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat > *, LinearNoBias< arma::mat, arma::mat > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, MultiplyMerge< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, 
Recurrent< arma::mat, arma::mat > *, RecurrentAttention< arma::mat, arma::mat > *, ReinforceNormal< arma::mat, arma::mat > *, Reparametrization< arma::mat, arma::mat > *, Select< arma::mat, arma::mat > *, Sequential< arma::mat, arma::mat, false > *, Sequential< arma::mat, arma::mat, true > *, Subview< arma::mat, arma::mat > *, VRClassReward< arma::mat, arma::mat > *, CustomLayers *... > LayerTypes
Set the serialization version of the adaboost class.
Definition: adaboost.hpp:180
.hpp
Definition: add_to_po.hpp:21
LossVisitor exposes the Loss() method of the given module.
This visitor is to support copy constructor for neural network module.
void Add(Args... args)
Definition: ffn.hpp:251
const arma::mat & Predictors() const
Get the matrix of data points (predictors).
Definition: ffn.hpp:274
The core includes that mlpack expects; standard C++ includes and Armadillo.
friend class GAN
Definition: ffn.hpp:463
double Train(arma::mat predictors, arma::mat responses, OptimizerType &optimizer)
Train the feedforward network on the given input data using the given optimizer.
WeightSizeVisitor returns the number of weights of the given module.
void Forward(arma::mat inputs, arma::mat &results)
Perform the forward pass of the data in real batch mode.
FFN & operator=(FFN)
Copy/move assignment operator.
void Shuffle()
Shuffle the order of function visitation.
~FFN()
Destructor to release allocated memory.
Implementation of the base layer.
Definition: base_layer.hpp:49
const arma::mat & Responses() const
Get the matrix of responses to the input data points.
Definition: ffn.hpp:269
ResetVisitor executes the Reset() function.
double EvaluateWithGradient(const arma::mat &parameters, GradType &gradient)
Evaluate the feedforward network with the given parameters.
OutputParameterVisitor exposes the output parameter of the given module.
void Add(LayerTypes< CustomLayers... > layer)
Definition: ffn.hpp:258
arma::mat & Parameters()
Modify the initial point for the optimization.
Definition: ffn.hpp:266
void ResetParameters()
Reset the module information (weights/parameters).
arma::mat & Predictors()
Modify the matrix of data points (predictors).
Definition: ffn.hpp:276
const arma::mat & Parameters() const
Return the initial point for the optimization.
Definition: ffn.hpp:264
DeltaVisitor exposes the delta parameter of the given module.
Implementation of a standard feed forward network.
Definition: ffn.hpp:51
OutputWidthVisitor exposes the OutputWidth() method of the given module.
double Backward(arma::mat targets, arma::mat &gradients)
Perform the backward pass of the data in real batch mode.
double Evaluate(arma::mat predictors, arma::mat responses)
Evaluate the feedforward network with the given predictors and responses.
FFN(OutputLayerType outputLayer=OutputLayerType(), InitializationRuleType initializeRule=InitializationRuleType())
Create the FFN object.