layer_types.hpp
Go to the documentation of this file.
1 
12 #ifndef MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP
13 #define MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP
14 
15 #include <boost/variant.hpp>
16 
17 // Layer modules.
40 
41 // Convolution modules.
45 
46 namespace mlpack {
47 namespace ann {
48 
49 
// Forward declarations of ANN layer classes, each templated on the input
// and output data representation types (the LayerTypes variant below
// instantiates them with arma::mat for both parameters).
50 template<typename InputDataType, typename OutputDataType> class BatchNorm;
51 template<typename InputDataType, typename OutputDataType> class DropConnect;
52 template<typename InputDataType, typename OutputDataType> class Glimpse;
53 template<typename InputDataType, typename OutputDataType> class Linear;
54 template<typename InputDataType, typename OutputDataType> class LinearNoBias;
55 template<typename InputDataType, typename OutputDataType> class LSTM;
56 template<typename InputDataType, typename OutputDataType> class GRU;
57 template<typename InputDataType, typename OutputDataType> class FastLSTM;
58 template<typename InputDataType, typename OutputDataType> class VRClassReward;
59 
// Forward declaration of AddMerge; in addition to the data types it takes
// a CustomLayers parameter pack so user-defined layer types can be held.
60 template<typename InputDataType,
61  typename OutputDataType,
62  typename... CustomLayers
63 >
64 class AddMerge;
65 
// Forward declaration of Sequential; in addition to the data types it takes
// a CustomLayers parameter pack so user-defined layer types can be held.
66 template<typename InputDataType,
67  typename OutputDataType,
68  typename... CustomLayers
69 >
70 class Sequential;
71 
// Forward declaration of Recurrent; in addition to the data types it takes
// a CustomLayers parameter pack so user-defined layer types can be held.
72 template<typename InputDataType,
73  typename OutputDataType,
74  typename... CustomLayers
75 >
76 class Recurrent;
77 
// Forward declaration of Concat; in addition to the data types it takes
// a CustomLayers parameter pack so user-defined layer types can be held.
78 template<typename InputDataType,
79  typename OutputDataType,
80  typename... CustomLayers
81 >
82 class Concat;
83 
// Forward declaration of ConcatPerformance; parameterized by an
// OutputLayerType in addition to the input/output data types (LayerTypes
// below instantiates OutputLayerType with NegativeLogLikelihood).
84 template<
85  typename OutputLayerType,
86  typename InputDataType,
87  typename OutputDataType
88 >
89 class ConcatPerformance;
90 
// Forward declaration of Convolution; parameterized by three separate
// convolution-rule types (forward, backward, and gradient) in addition to
// the input/output data types (LayerTypes below instantiates these with
// NaiveConvolution specializations).
91 template<
92  typename ForwardConvolutionRule,
93  typename BackwardConvolutionRule,
94  typename GradientConvolutionRule,
95  typename InputDataType,
96  typename OutputDataType
97 >
98 class Convolution;
99 
100 template<
101  typename InputDataType,
102  typename OutputDataType
103 >
105 
106 template <typename... CustomLayers>
107 using LayerTypes = boost::variant<
118  arma::mat, arma::mat>*,
122  NaiveConvolution<ValidConvolution>, arma::mat, arma::mat>*,
151  CustomLayers*...
152 >;
153 
154 } // namespace ann
155 } // namespace mlpack
156 
157 #endif
Implementation of the variance reduced classification reinforcement layer.
Definition: layer_types.hpp:58
Implementation of the Add module class.
Definition: add.hpp:34
Implementation of the log softmax layer.
Definition: log_softmax.hpp:36
Implementation of the AddMerge module class.
Definition: add_merge.hpp:42
.hpp
Definition: add_to_po.hpp:21
Implementation of the reinforce normal layer.
Implementation of the Linear layer class.
Definition: layer_types.hpp:53
The LeakyReLU activation function, defined by.
Definition: leaky_relu.hpp:44
This class implements the Recurrent Model for Visual Attention, using a variety of possible layer imp...
Implementation of the Convolution class.
Definition: convolution.hpp:46
Implementation of the MeanPooling.
Implementation of the Join module class.
Definition: join.hpp:33
Implementation of the concat performance class.
The Hard Tanh activation function, defined by.
Definition: hard_tanh.hpp:49
The select module selects the specified column from a given input matrix.
Definition: select.hpp:32
Implementation of the negative log likelihood layer.
The PReLU activation function, defined by (where alpha is trainable)
Implementation of the base layer.
Definition: base_layer.hpp:47
Implementation of the Concat class.
Definition: concat.hpp:44
An implementation of a lstm network layer.
Definition: layer_types.hpp:55
Implementation of the Lookup class.
Definition: lookup.hpp:35
Implementation of the LinearNoBias class.
Definition: layer_types.hpp:54
Computes the two-dimensional convolution.
An implementation of a gru network layer.
Definition: gru.hpp:57
The dropout layer is a regularizer that randomly with probability ratio sets input values to zero and...
Definition: dropout.hpp:54
The glimpse layer returns a retina-like representation (down-scaled cropped images) of increasing sca...
Definition: glimpse.hpp:87
The DropConnect layer is a regularizer that randomly with probability ratio sets the connection value...
Definition: dropconnect.hpp:62
The mean squared error performance function measures the network's performance according to the mean ...
Implementation of the multiply constant layer.
The cross-entropy performance function measures the network's performance according to the cross-entr...
Declaration of the Batch Normalization layer class.
Definition: batch_norm.hpp:57
Implementation of the RecurrentLayer class.
Definition: layer_types.hpp:76
boost::variant< Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, CrossEntropyError< arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat > *, LinearNoBias< arma::mat, arma::mat > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MeanSquaredError< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, Recurrent< arma::mat, arma::mat > *, RecurrentAttention< arma::mat, arma::mat > *, ReinforceNormal< arma::mat, arma::mat > *, SigmoidCrossEntropyError< arma::mat, arma::mat > *, Select< arma::mat, arma::mat > *, Sequential< arma::mat, arma::mat > *, VRClassReward< arma::mat, arma::mat > *, CustomLayers *... > LayerTypes
Implementation of the Sequential class.
Definition: layer_types.hpp:70
Implementation of the constant layer.
Definition: constant.hpp:34
Implementation of the MaxPooling layer.
Definition: max_pooling.hpp:52
The ELU activation function, defined by.
Definition: elu.hpp:61
The SigmoidCrossEntropyError performance function measures the network's performance according to the...
Definition and Implementation of the Bilinear Interpolation Layer.
An implementation of a faster version of the Fast LSTM network layer.
Definition: fast_lstm.hpp:61