// NOTE(review): This is a fragmentary extraction of mlpack's
// RecurrentAttention layer header (ann/layer/recurrent_attention.hpp).
// The integers embedded at the start of lines are the ORIGINAL file's line
// numbers left behind by the extraction, and many intermediate source lines
// were dropped, so this text is NOT compilable as-is. The comments below
// only label what each fragment appears to be — confirm every detail
// against the upstream header before relying on it.

// Include guard, boost ptr_container, and the ANN visitor helpers
// (delta / output-parameter / reset / weight-size visitors), followed by
// the start of the class template parameter list.
12 #ifndef MLPACK_METHODS_ANN_LAYER_RECURRENT_ATTENTION_HPP 13 #define MLPACK_METHODS_ANN_LAYER_RECURRENT_ATTENTION_HPP 16 #include <boost/ptr_container/ptr_vector.hpp> 18 #include "../visitor/delta_visitor.hpp" 19 #include "../visitor/output_parameter_visitor.hpp" 20 #include "../visitor/reset_visitor.hpp" 21 #include "../visitor/weight_size_visitor.hpp" 52 typename InputDataType = arma::mat,
// Both matrix template parameters default to arma::mat.
53 typename OutputDataType = arma::mat
55 class RecurrentAttention
// Constructor template: takes the RNN module and the action module by
// const reference (full signature elided by the extraction).
72 template<
typename RNNModuleType,
typename ActionModuleType>
74 const RNNModuleType& rnn,
75 const ActionModuleType& action,
// Forward pass; rvalue-reference matrix parameters are the pre-move-semantics
// mlpack ANN calling convention of this era.
86 void Forward(arma::Mat<eT>&& input, arma::Mat<eT>&& output);
// Backward pass (remaining parameters `gy` and `g` elided here; see the
// Doxygen brief below for the full signature).
98 void Backward(
const arma::Mat<eT>&& ,
// Presumably the Gradient() member template header — TODO confirm, the
// function name itself was dropped by the extraction.
109 template<
typename eT>
// Accessor: mutable view of the model modules.
115 std::vector<LayerTypes<>>&
Model() {
return network; }
// Accessor: read-only view of the parameters.
123 OutputDataType
const&
Parameters()
const {
return parameters; }
// Accessors: read-only and mutable views of the delta.
138 OutputDataType
const&
Delta()
const {
return delta; }
140 OutputDataType&
Delta() {
return delta; }
// Accessor: read-only view of the gradient.
143 OutputDataType
const&
Gradient()
const {
return gradient; }
// Serialization hook (boost::serialization-style Archive + version).
150 template<
typename Archive>
151 void serialize(Archive& ar,
const unsigned int )
// Private helper: clears intermediateGradient, then (apparently) computes
// the action module's gradient on the last backward step and the RNN
// module's otherwise, accumulating the result into attentionGradient.
// Interior lines are missing — do not infer the exact control flow from
// this fragment alone.
155 void IntermediateGradient()
157 intermediateGradient.zeros();
160 if (backwardStep == (rho - 1))
163 std::move(actionError)), actionModule);
168 outputParameterVisitor, actionModule)), std::move(actionError)),
174 outputParameterVisitor, rnnModule)), std::move(recurrentError)),
177 attentionGradient += intermediateGradient;
// Member variables: parameters, the module list, per-step recorded outputs,
// delta/gradient/input/output parameter matrices, and the scratch matrices
// used while accumulating the attention gradient.
202 OutputDataType parameters;
205 std::vector<LayerTypes<>> network;
217 std::vector<arma::mat> feedbackOutputParameter;
220 std::vector<arma::mat> moduleOutputParameter;
223 OutputDataType delta;
226 OutputDataType gradient;
229 InputDataType inputParameter;
232 OutputDataType outputParameter;
235 arma::mat recurrentError;
238 arma::mat actionError;
241 arma::mat actionDelta;
247 arma::mat initialInput;
253 arma::mat attentionGradient;
256 arma::mat intermediateGradient;
// Template implementation is included at the end of the header.
263 #include "recurrent_attention_impl.hpp"
bool & Deterministic()
Modify the value of the deterministic parameter.
OutputDataType & Parameters()
Modify the parameters.
void serialize(Archive &ar, const unsigned int)
Serialize the layer.
InputDataType & InputParameter()
Modify the input parameter.
The core includes that mlpack expects: standard C++ includes and Armadillo.
WeightSizeVisitor returns the number of weights of the given module.
OutputDataType & Gradient()
Modify the gradient.
InputDataType const & InputParameter() const
Get the input parameter.
OutputDataType const & Delta() const
Get the delta.
OutputDataType const & Gradient() const
Get the gradient.
ResetVisitor executes the Reset() function.
OutputParameterVisitor exposes the output parameter of the given module.
OutputDataType & OutputParameter()
Modify the output parameter.
RecurrentAttention()
Default constructor: this will not give a usable RecurrentAttention object, so be sure to set all the parameters before use.
OutputDataType & Delta()
Modify the delta.
SearchModeVisitor executes the Gradient() method of the given module using the input and delta parameter.
OutputDataType const & OutputParameter() const
Get the output parameter.
DeltaVisitor exposes the delta parameter of the given module.
OutputDataType const & Parameters() const
Get the parameters.
boost::variant< Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, CrossEntropyError< arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat > *, LinearNoBias< arma::mat, arma::mat > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MeanSquaredError< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, Recurrent< arma::mat, arma::mat > *, RecurrentAttention< arma::mat, arma::mat > *, ReinforceNormal< arma::mat, arma::mat > *, SigmoidCrossEntropyError< arma::mat, arma::mat > *, Select< arma::mat, arma::mat > *, Sequential< arma::mat, arma::mat > *, VRClassReward< arma::mat, arma::mat > *, CustomLayers *... > LayerTypes
std::vector< LayerTypes<> > & Model()
Get the model modules.
void Backward(const arma::Mat< eT > &&, arma::Mat< eT > &&gy, arma::Mat< eT > &&g)
Ordinary feed backward pass of a neural network, calculating the function f(x) by propagating x backwards through f, using the results from the feed forward pass.
void Forward(arma::Mat< eT > &&input, arma::Mat< eT > &&output)
Ordinary feed forward pass of a neural network, evaluating the function f(x) by propagating the activity forward through f.
bool Deterministic() const
The value of the deterministic parameter.