ffn.hpp
Go to the documentation of this file.
1 
13 #ifndef MLPACK_METHODS_ANN_FFN_HPP
14 #define MLPACK_METHODS_ANN_FFN_HPP
15 
16 #include <mlpack/prereqs.hpp>
17 
25 #include "visitor/copy_visitor.hpp"
26 
28 
33 
34 namespace mlpack {
35 namespace ann {
36 
// Implementation of a standard feed-forward network.
//
// Template parameters:
//   OutputLayerType        - output layer (loss) used to evaluate the network.
//   InitializationRuleType - rule used to initialize the weight matrix.
//   CustomLayers           - any additional user-defined layer types.
45 template<
46  typename OutputLayerType = NegativeLogLikelihood<>,
47  typename InitializationRuleType = RandomInitialization,
48  typename... CustomLayers
49 >
50 class FFN
51 {
52  public:
55 
// Create the FFN object with the given output layer and initialization rule.
69  FFN(OutputLayerType outputLayer = OutputLayerType(),
70  InitializationRuleType initializeRule = InitializationRuleType());
71 
// Copy constructor.
73  FFN(const FFN&);
74 
// Move constructor.
76  FFN(FFN&&);
77 
80 
// Destructor to release allocated memory (layers added via Add() are owned
// by this object; see deleteVisitor below).
82  ~FFN();
83 
// Train the feed-forward network on the given input data using the given
// optimizer.
100  template<typename OptimizerType>
101  void Train(arma::mat predictors,
102  arma::mat responses,
103  OptimizerType& optimizer);
104 
// Train the feed-forward network on the given input data, constructing a
// default optimizer (RMSProp) internally.
121  template<typename OptimizerType = mlpack::optimization::RMSProp>
122  void Train(arma::mat predictors, arma::mat responses);
123 
// Predict the responses to a given set of predictors.
135  void Predict(arma::mat predictors, arma::mat& results);
136 
// Evaluate the feed-forward network with the given parameters.
145  double Evaluate(const arma::mat& parameters);
146 
// Evaluate the network on a batch of `batchSize` points starting at `begin`;
// `deterministic` controls stochastic layers (e.g. dropout-style layers).
160  double Evaluate(const arma::mat& parameters,
161  const size_t begin,
162  const size_t batchSize,
163  const bool deterministic);
164 
// Batch Evaluate overload without the explicit `deterministic` flag.
177  double Evaluate(const arma::mat& parameters,
178  const size_t begin,
179  const size_t batchSize);
180 
// Evaluate the network with the given parameters and compute the gradient
// in the same pass.
188  template<typename GradType>
189  double EvaluateWithGradient(const arma::mat& parameters, GradType& gradient);
190 
// Batched EvaluateWithGradient with explicit determinism control.
205  template<typename GradType>
206  double EvaluateWithGradient(const arma::mat& parameters,
207  const size_t begin,
208  GradType& gradient,
209  const size_t batchSize,
210  const bool deterministic);
211 
// Batched EvaluateWithGradient without the explicit `deterministic` flag.
225  template<typename GradType>
226  double EvaluateWithGradient(const arma::mat& parameters,
227  const size_t begin,
228  GradType& gradient,
229  const size_t batchSize);
230 
// Evaluate the gradient of the network with the given parameters, with
// respect to only the batch of points starting at `begin`.
243  void Gradient(const arma::mat& parameters,
244  const size_t begin,
245  arma::mat& gradient,
246  const size_t batchSize);
247 
// Shuffle the order of function visitation (i.e. the order of the
// predictor/response columns).
252  void Shuffle();
253 
254  /*
255  * Add a new module to the model.
256  *
257  * @param args The layer parameter.
258  */
259  template <class LayerType, class... Args>
260  void Add(Args... args) { network.push_back(new LayerType(args...)); }
261 
262  /*
263  * Add a new module to the model.
264  *
265  * @param layer The Layer to be added to the model.
266  */
267  void Add(LayerTypes<CustomLayers...> layer) { network.push_back(layer); }
268 
// Return the number of separable functions (the number of predictor points).
270  size_t NumFunctions() const { return numFunctions; }
271 
// Return / modify the initial point for the optimization (the flattened
// parameter matrix).
273  const arma::mat& Parameters() const { return parameter; }
275  arma::mat& Parameters() { return parameter; }
276 
// Get / modify the matrix of responses to the input data points.
278  const arma::mat& Responses() const { return responses; }
280  arma::mat& Responses() { return responses; }
281 
// Get / modify the matrix of data points (predictors).
283  const arma::mat& Predictors() const { return predictors; }
285  arma::mat& Predictors() { return predictors; }
286 
// Reset the module information (weights/parameters).
290  void ResetParameters();
291 
// Serialize the model (boost::serialization; class version is pinned below).
293  template<typename Archive>
294  void serialize(Archive& ar, const unsigned int /* version */);
295 
// Perform the forward pass of the data in real batch mode.
306  void Forward(arma::mat inputs, arma::mat& results);
307 
// Forward pass restricted to the sub-network of layers [begin, end].
// NOTE(review): whether `end` is inclusive must be confirmed in ffn_impl.hpp.
319  void Forward(arma::mat inputs,
320  arma::mat& results,
321  const size_t begin,
322  const size_t end);
323 
// Perform the backward pass of the data in real batch mode.
335  double Backward(arma::mat targets, arma::mat& gradients);
336 
337  private:
338  // Helper functions.
// Internal forward pass over the stored network for a single input batch.
345  void Forward(arma::mat&& input);
346 
// Swap in new training data (predictors/responses) for the next Train call.
354  void ResetData(arma::mat predictors, arma::mat responses);
355 
// Internal backward pass using the stored error matrix.
360  void Backward();
361 
// Internal gradient computation for the given input.
366  void Gradient(arma::mat&& input);
367 
// Propagate the current `deterministic` flag to all layers.
372  void ResetDeterministic();
373 
// Reset the per-layer gradient aliases into the given gradient matrix.
377  void ResetGradients(arma::mat& gradient);
378 
// Swap the content of this network with the given one (used by the
// copy-and-swap assignment operator).
384  void Swap(FFN& network);
385 
// Instantiated output (loss) layer used to evaluate the network.
387  OutputLayerType outputLayer;
388 
// Instantiated weight-initialization rule.
391  InitializationRuleType initializeRule;
392 
// Cached input width/height — presumably for layers with spatial
// dimensions (e.g. convolutions); confirm in ffn_impl.hpp.
394  size_t width;
395 
397  size_t height;
398 
// Whether the model's parameters/layers have been reset (initialized).
400  bool reset;
401 
// The layers making up the model, as boost::variant pointers.
403  std::vector<LayerTypes<CustomLayers...> > network;
404 
// The matrix of data points (predictors).
406  arma::mat predictors;
407 
// The matrix of responses to the input data points.
409  arma::mat responses;
410 
// The flattened matrix of all trainable parameters.
412  arma::mat parameter;
413 
// The number of separable functions (the number of predictor points).
415  size_t numFunctions;
416 
// The current error produced by the output layer during training.
418  arma::mat error;
419 
// The input of the current forward/backward pass.
421  arma::mat currentInput;
422 
// DeltaVisitor exposes the delta parameter of a given module.
424  DeltaVisitor deltaVisitor;
425 
// OutputParameterVisitor exposes the output parameter of a given module.
427  OutputParameterVisitor outputParameterVisitor;
428 
// WeightSizeVisitor returns the number of weights of a given module.
430  WeightSizeVisitor weightSizeVisitor;
431 
// OutputWidthVisitor exposes the OutputWidth() method of a given module.
433  OutputWidthVisitor outputWidthVisitor;
434 
// OutputHeightVisitor exposes the OutputHeight() method of a given module.
436  OutputHeightVisitor outputHeightVisitor;
437 
// ResetVisitor executes the Reset() function of a given module.
439  ResetVisitor resetVisitor;
440 
// DeleteVisitor executes the destructor of the instantiated module.
442  DeleteVisitor deleteVisitor;
443 
// Whether the network currently runs in deterministic (prediction) mode.
445  bool deterministic;
446 
// Locally stored backpropagated delta.
448  arma::mat delta;
449 
// Locally stored input parameter passed between layers.
451  arma::mat inputParameter;
452 
// Locally stored output parameter of the last layer.
454  arma::mat outputParameter;
456 
457  arma::mat gradient;
458 
// CopyVisitor supports the copy constructor for network modules.
460  CopyVisitor<CustomLayers...> copyVisitor;
461 }; // class FFN
462 
463 } // namespace ann
464 } // namespace mlpack
465 
// Set the boost::serialization version of the FFN class. Pinning an explicit
// version (1) lets FFN::serialize() evolve its archive format while remaining
// able to load archives written by older code.
468 namespace boost {
469 namespace serialization {
470 
471 template<typename OutputLayerType,
472  typename InitializationRuleType,
473  typename... CustomLayer>
474 struct version<
475  mlpack::ann::FFN<OutputLayerType, InitializationRuleType, CustomLayer...>>
476 {
477  BOOST_STATIC_CONSTANT(int, value = 1);
478 };
479 
480 } // namespace serialization
481 } // namespace boost
482 
483 // Include implementation.
484 #include "ffn_impl.hpp"
485 
486 #endif
DeleteVisitor executes the destructor of the instantiated object.
void Gradient(const arma::mat &parameters, const size_t begin, arma::mat &gradient, const size_t batchSize)
Evaluate the gradient of the feedforward network with the given parameters, and with respect to only ...
OutputHeightVisitor exposes the OutputHeight() method of the given module.
arma::mat & Responses()
Modify the matrix of responses to the input data points.
Definition: ffn.hpp:280
void serialize(Archive &ar, const unsigned int)
Serialize the model.
size_t NumFunctions() const
Return the number of separable functions (the number of predictor points).
Definition: ffn.hpp:270
boost::variant< Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, AtrousConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, TransposedConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, AlphaDropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, FlexibleReLU< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LayerNorm< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat > *, LinearNoBias< arma::mat, arma::mat > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, MultiplyMerge< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, Recurrent< arma::mat, arma::mat > *, RecurrentAttention< arma::mat, arma::mat > *, ReinforceNormal< arma::mat, arma::mat > *, 
Select< arma::mat, arma::mat > *, Sequential< arma::mat, arma::mat > *, VRClassReward< arma::mat, arma::mat > *, CustomLayers *... > LayerTypes
double Evaluate(const arma::mat &parameters)
Evaluate the feedforward network with the given parameters.
void Predict(arma::mat predictors, arma::mat &results)
Predict the responses to a given set of predictors.
BaseLayer< ActivationFunction, InputDataType, OutputDataType > CustomLayer
Standard Sigmoid layer.
Set the serialization version of the FFN class.
Definition: ffn.hpp:468
.hpp
Definition: add_to_po.hpp:21
This visitor is to support copy constructor for neural network module.
void Add(Args... args)
Definition: ffn.hpp:260
const arma::mat & Predictors() const
Get the matrix of data points (predictors).
Definition: ffn.hpp:283
The core includes that mlpack expects; standard C++ includes and Armadillo.
WeightSizeVisitor returns the number of weights of the given module.
void Forward(arma::mat inputs, arma::mat &results)
Perform the forward pass of the data in real batch mode.
void Train(arma::mat predictors, arma::mat responses, OptimizerType &optimizer)
Train the feedforward network on the given input data using the given optimizer.
FFN & operator=(FFN)
Copy/move assignment operator.
void Shuffle()
Shuffle the order of function visitation.
~FFN()
Destructor to release allocated memory.
const arma::mat & Responses() const
Get the matrix of responses to the input data points.
Definition: ffn.hpp:278
ResetVisitor executes the Reset() function.
double EvaluateWithGradient(const arma::mat &parameters, GradType &gradient)
Evaluate the feedforward network with the given parameters.
OutputParameterVisitor exposes the output parameter of the given module.
void Add(LayerTypes< CustomLayers... > layer)
Definition: ffn.hpp:267
arma::mat & Parameters()
Modify the initial point for the optimization.
Definition: ffn.hpp:275
void ResetParameters()
Reset the module information (weights/parameters).
arma::mat & Predictors()
Modify the matrix of data points (predictors).
Definition: ffn.hpp:285
const arma::mat & Parameters() const
Return the initial point for the optimization.
Definition: ffn.hpp:273
DeltaVisitor exposes the delta parameter of the given module.
Implementation of a standard feed forward network.
Definition: ffn.hpp:50
OutputWidthVisitor exposes the OutputWidth() method of the given module.
double Backward(arma::mat targets, arma::mat &gradients)
Perform the backward pass of the data in real batch mode.
FFN(OutputLayerType outputLayer=OutputLayerType(), InitializationRuleType initializeRule=InitializationRuleType())
Create the FFN object.