\section{mlpack\+:\+:ann Namespace Reference}
\label{namespacemlpack_1_1ann}\index{mlpack\+::ann@{mlpack\+::ann}}


Artificial Neural Network.  


\subsection*{Namespaces}
\begin{DoxyCompactItemize}
\item 
 \textbf{ augmented}
\end{DoxyCompactItemize}
\subsection*{Classes}
\begin{DoxyCompactItemize}
\item 
class \textbf{ Add}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Add}{p.}{classmlpack_1_1ann_1_1Add} module class. \end{DoxyCompactList}\item 
class \textbf{ Add\+Merge}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Add\+Merge}{p.}{classmlpack_1_1ann_1_1AddMerge} module class. \end{DoxyCompactList}\item 
class \textbf{ Add\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Add\+Visitor}{p.}{classmlpack_1_1ann_1_1AddVisitor} exposes the Add() method of the given module. \end{DoxyCompactList}\item 
class \textbf{ Alpha\+Dropout}
\begin{DoxyCompactList}\small\item\em The alpha -\/ dropout layer is a regularizer that randomly with probability \textquotesingle{}ratio\textquotesingle{} sets input values to alpha\+Dash. \end{DoxyCompactList}\item 
class \textbf{ Atrous\+Convolution}
\begin{DoxyCompactList}\small\item\em Implementation of the Atrous \doxyref{Convolution}{p.}{classmlpack_1_1ann_1_1Convolution} class. \end{DoxyCompactList}\item 
class \textbf{ Backward\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Backward\+Visitor}{p.}{classmlpack_1_1ann_1_1BackwardVisitor} executes the Backward() function given the input, error and delta parameter. \end{DoxyCompactList}\item 
class \textbf{ Base\+Layer}
\begin{DoxyCompactList}\small\item\em Implementation of the base layer. \end{DoxyCompactList}\item 
class \textbf{ Batch\+Norm}
\begin{DoxyCompactList}\small\item\em Declaration of the Batch Normalization layer class. \end{DoxyCompactList}\item 
class \textbf{ Bernoulli\+Distribution}
\begin{DoxyCompactList}\small\item\em Multiple independent Bernoulli distributions. \end{DoxyCompactList}\item 
class \textbf{ Bias\+Set\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Bias\+Set\+Visitor}{p.}{classmlpack_1_1ann_1_1BiasSetVisitor} updates the module bias parameters given the parameters set. \end{DoxyCompactList}\item 
class \textbf{ Bilinear\+Interpolation}
\begin{DoxyCompactList}\small\item\em Definition and Implementation of the Bilinear Interpolation Layer. \end{DoxyCompactList}\item 
class \textbf{ Binary\+R\+BM}
\begin{DoxyCompactList}\small\item\em For more information, see the following paper\+: \end{DoxyCompactList}\item 
class \textbf{ B\+R\+NN}
\begin{DoxyCompactList}\small\item\em Implementation of a standard bidirectional recurrent neural network container. \end{DoxyCompactList}\item 
class \textbf{ C\+E\+LU}
\begin{DoxyCompactList}\small\item\em The \doxyref{C\+E\+LU}{p.}{classmlpack_1_1ann_1_1CELU} activation function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Concat}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Concat}{p.}{classmlpack_1_1ann_1_1Concat} class. \end{DoxyCompactList}\item 
class \textbf{ Concatenate}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Concatenate}{p.}{classmlpack_1_1ann_1_1Concatenate} module class. \end{DoxyCompactList}\item 
class \textbf{ Concat\+Performance}
\begin{DoxyCompactList}\small\item\em Implementation of the concat performance class. \end{DoxyCompactList}\item 
class \textbf{ Constant}
\begin{DoxyCompactList}\small\item\em Implementation of the constant layer. \end{DoxyCompactList}\item 
class \textbf{ Const\+Initialization}
\begin{DoxyCompactList}\small\item\em This class is used to initialize weight matrix with constant values. \end{DoxyCompactList}\item 
class \textbf{ Convolution}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Convolution}{p.}{classmlpack_1_1ann_1_1Convolution} class. \end{DoxyCompactList}\item 
class \textbf{ Copy\+Visitor}
\begin{DoxyCompactList}\small\item\em This visitor is to support copy constructor for neural network module. \end{DoxyCompactList}\item 
class \textbf{ Cosine\+Embedding\+Loss}
\begin{DoxyCompactList}\small\item\em Cosine Embedding Loss function is used for measuring whether two inputs are similar or dissimilar, using the cosine distance, and is typically used for learning nonlinear embeddings or semi-\/supervised learning. \end{DoxyCompactList}\item 
class \textbf{ C\+Re\+LU}
\begin{DoxyCompactList}\small\item\em A concatenated Re\+LU has two outputs, one Re\+LU and one negative Re\+LU, concatenated together. \end{DoxyCompactList}\item 
class \textbf{ Cross\+Entropy\+Error}
\begin{DoxyCompactList}\small\item\em The cross-\/entropy performance function measures the network\textquotesingle{}s performance according to the cross-\/entropy between the input and target distributions. \end{DoxyCompactList}\item 
class \textbf{ D\+C\+G\+AN}
\begin{DoxyCompactList}\small\item\em For more information, see the following paper\+: \end{DoxyCompactList}\item 
class \textbf{ Delete\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Delete\+Visitor}{p.}{classmlpack_1_1ann_1_1DeleteVisitor} executes the destructor of the instantiated object. \end{DoxyCompactList}\item 
class \textbf{ Delta\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Delta\+Visitor}{p.}{classmlpack_1_1ann_1_1DeltaVisitor} exposes the delta parameter of the given module. \end{DoxyCompactList}\item 
class \textbf{ Deterministic\+Set\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Deterministic\+Set\+Visitor}{p.}{classmlpack_1_1ann_1_1DeterministicSetVisitor} sets the deterministic parameter given the deterministic value. \end{DoxyCompactList}\item 
class \textbf{ Dice\+Loss}
\begin{DoxyCompactList}\small\item\em The dice loss performance function measures the network\textquotesingle{}s performance according to the dice coefficient between the input and target distributions. \end{DoxyCompactList}\item 
class \textbf{ Drop\+Connect}
\begin{DoxyCompactList}\small\item\em The \doxyref{Drop\+Connect}{p.}{classmlpack_1_1ann_1_1DropConnect} layer is a regularizer that randomly with probability ratio sets the connection values to zero and scales the remaining elements by factor 1 /(1 -\/ ratio). \end{DoxyCompactList}\item 
class \textbf{ Dropout}
\begin{DoxyCompactList}\small\item\em The dropout layer is a regularizer that randomly with probability \textquotesingle{}ratio\textquotesingle{} sets input values to zero and scales the remaining elements by factor 1 / (1 -\/ ratio) rather than during test time so as to keep the expected sum same. \end{DoxyCompactList}\item 
class \textbf{ Earth\+Mover\+Distance}
\begin{DoxyCompactList}\small\item\em The earth mover distance function measures the network\textquotesingle{}s performance according to the Kantorovich-\/\+Rubinstein duality approximation. \end{DoxyCompactList}\item 
class \textbf{ Elish\+Function}
\begin{DoxyCompactList}\small\item\em The E\+Li\+SH function, defined by. \end{DoxyCompactList}\item 
class \textbf{ E\+LU}
\begin{DoxyCompactList}\small\item\em The \doxyref{E\+LU}{p.}{classmlpack_1_1ann_1_1ELU} activation function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Fast\+L\+S\+TM}
\begin{DoxyCompactList}\small\item\em An implementation of a faster version of the Fast \doxyref{L\+S\+TM}{p.}{classmlpack_1_1ann_1_1LSTM} network layer. \end{DoxyCompactList}\item 
class \textbf{ F\+FN}
\begin{DoxyCompactList}\small\item\em Implementation of a standard feed forward network. \end{DoxyCompactList}\item 
class \textbf{ F\+F\+T\+Convolution}
\begin{DoxyCompactList}\small\item\em Computes the two-\/dimensional convolution through fft. \end{DoxyCompactList}\item 
class \textbf{ Flexible\+Re\+LU}
\begin{DoxyCompactList}\small\item\em The \doxyref{Flexible\+Re\+LU}{p.}{classmlpack_1_1ann_1_1FlexibleReLU} activation function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Forward\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Forward\+Visitor}{p.}{classmlpack_1_1ann_1_1ForwardVisitor} executes the Forward() function given the input and output parameter. \end{DoxyCompactList}\item 
class \textbf{ Full\+Convolution}
\item 
class \textbf{ G\+AN}
\begin{DoxyCompactList}\small\item\em The implementation of the standard \doxyref{G\+AN}{p.}{classmlpack_1_1ann_1_1GAN} module. \end{DoxyCompactList}\item 
class \textbf{ Gaussian\+Initialization}
\begin{DoxyCompactList}\small\item\em This class is used to initialize the weight matrix with a Gaussian distribution. \end{DoxyCompactList}\item 
class \textbf{ G\+E\+L\+U\+Function}
\begin{DoxyCompactList}\small\item\em The G\+E\+LU function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Glimpse}
\begin{DoxyCompactList}\small\item\em The glimpse layer returns a retina-\/like representation (down-\/scaled cropped images) of increasing scale around a given location in a given image. \end{DoxyCompactList}\item 
class \textbf{ Glorot\+Initialization\+Type}
\begin{DoxyCompactList}\small\item\em This class is used to initialize the weight matrix with the Glorot Initialization method. \end{DoxyCompactList}\item 
class \textbf{ Gradient\+Set\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Gradient\+Set\+Visitor}{p.}{classmlpack_1_1ann_1_1GradientSetVisitor} updates the gradient parameter given the gradient set. \end{DoxyCompactList}\item 
class \textbf{ Gradient\+Update\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Gradient\+Update\+Visitor}{p.}{classmlpack_1_1ann_1_1GradientUpdateVisitor} updates the gradient parameter given the gradient set. \end{DoxyCompactList}\item 
class \textbf{ Gradient\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Gradient\+Visitor}{p.}{classmlpack_1_1ann_1_1GradientVisitor} executes the Gradient() method of the given module using the input and delta parameter. \end{DoxyCompactList}\item 
class \textbf{ Gradient\+Zero\+Visitor}
\item 
class \textbf{ G\+RU}
\begin{DoxyCompactList}\small\item\em An implementation of a gru network layer. \end{DoxyCompactList}\item 
class \textbf{ Hard\+Shrink}
\begin{DoxyCompactList}\small\item\em Hard Shrink operator is defined as, \begin{eqnarray*} f(x) &=& \left\{ \begin{array}{lr} x & : x > lambda \\ x & : x < -lambda \\ 0 & : otherwise \end{array} \right. \\ f'(x) &=& \left\{ \begin{array}{lr} 1 & : x > lambda \\ 1 & : x < -lambda \\ 0 & : otherwise \end{array} \right. \end{eqnarray*} lambda is set to 0.\+5 by default. \end{DoxyCompactList}\item 
class \textbf{ Hard\+Sigmoid\+Function}
\begin{DoxyCompactList}\small\item\em The hard sigmoid function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Hard\+TanH}
\begin{DoxyCompactList}\small\item\em The Hard Tanh activation function, defined by. \end{DoxyCompactList}\item 
class \textbf{ He\+Initialization}
\begin{DoxyCompactList}\small\item\em This class is used to initialize weight matrix with the He initialization rule given by He et al. \end{DoxyCompactList}\item 
class \textbf{ Highway}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Highway}{p.}{classmlpack_1_1ann_1_1Highway} layer. \end{DoxyCompactList}\item 
class \textbf{ Hinge\+Embedding\+Loss}
\begin{DoxyCompactList}\small\item\em The Hinge Embedding loss function is often used to compute the loss between y\+\_\+true and y\+\_\+pred. \end{DoxyCompactList}\item 
class \textbf{ Huber\+Loss}
\begin{DoxyCompactList}\small\item\em The Huber loss is a loss function used in robust regression, that is less sensitive to outliers in data than the squared error loss. \end{DoxyCompactList}\item 
class \textbf{ Identity\+Function}
\begin{DoxyCompactList}\small\item\em The identity function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Init\+Traits}
\begin{DoxyCompactList}\small\item\em This is a template class that can provide information about various initialization methods. \end{DoxyCompactList}\item 
class \textbf{ Init\+Traits$<$ Kathirvalavakumar\+Subavathi\+Initialization $>$}
\begin{DoxyCompactList}\small\item\em Initialization traits of the Kathirvalavakumar Subavathi initialization rule. \end{DoxyCompactList}\item 
class \textbf{ Init\+Traits$<$ Nguyen\+Widrow\+Initialization $>$}
\begin{DoxyCompactList}\small\item\em Initialization traits of the Nguyen-\/\+Widrow initialization rule. \end{DoxyCompactList}\item 
class \textbf{ Join}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Join}{p.}{classmlpack_1_1ann_1_1Join} module class. \end{DoxyCompactList}\item 
class \textbf{ Kathirvalavakumar\+Subavathi\+Initialization}
\begin{DoxyCompactList}\small\item\em This class is used to initialize the weight matrix with the method proposed by T. \end{DoxyCompactList}\item 
class \textbf{ K\+L\+Divergence}
\begin{DoxyCompactList}\small\item\em The Kullback-\/\+Leibler divergence is often used for continuous distributions (direct regression). \end{DoxyCompactList}\item 
class \textbf{ L1\+Loss}
\begin{DoxyCompactList}\small\item\em The L1 loss is a loss function that measures the mean absolute error (M\+AE) between each element in the input x and target y. \end{DoxyCompactList}\item 
class \textbf{ Layer\+Norm}
\begin{DoxyCompactList}\small\item\em Declaration of the Layer Normalization class. \end{DoxyCompactList}\item 
class \textbf{ Layer\+Traits}
\begin{DoxyCompactList}\small\item\em This is a template class that can provide information about various layers. \end{DoxyCompactList}\item 
class \textbf{ Leaky\+Re\+LU}
\begin{DoxyCompactList}\small\item\em The \doxyref{Leaky\+Re\+LU}{p.}{classmlpack_1_1ann_1_1LeakyReLU} activation function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Lecun\+Normal\+Initialization}
\begin{DoxyCompactList}\small\item\em This class is used to initialize weight matrix with the Lecun Normalization initialization rule. \end{DoxyCompactList}\item 
class \textbf{ Linear}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Linear}{p.}{classmlpack_1_1ann_1_1Linear} layer class. \end{DoxyCompactList}\item 
class \textbf{ Linear\+No\+Bias}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Linear\+No\+Bias}{p.}{classmlpack_1_1ann_1_1LinearNoBias} class. \end{DoxyCompactList}\item 
class \textbf{ Li\+S\+H\+T\+Function}
\begin{DoxyCompactList}\small\item\em The Li\+S\+HT function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Load\+Output\+Parameter\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Load\+Output\+Parameter\+Visitor}{p.}{classmlpack_1_1ann_1_1LoadOutputParameterVisitor} restores the output parameter using the given parameter set. \end{DoxyCompactList}\item 
class \textbf{ Log\+Cosh\+Loss}
\begin{DoxyCompactList}\small\item\em The Log-\/\+Hyperbolic-\/\+Cosine loss function is often used to improve variational auto encoder. \end{DoxyCompactList}\item 
class \textbf{ Logistic\+Function}
\begin{DoxyCompactList}\small\item\em The logistic function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Log\+Soft\+Max}
\begin{DoxyCompactList}\small\item\em Implementation of the log softmax layer. \end{DoxyCompactList}\item 
class \textbf{ Lookup}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Lookup}{p.}{classmlpack_1_1ann_1_1Lookup} class. \end{DoxyCompactList}\item 
class \textbf{ Loss\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Loss\+Visitor}{p.}{classmlpack_1_1ann_1_1LossVisitor} exposes the Loss() method of the given module. \end{DoxyCompactList}\item 
class \textbf{ L\+Regularizer}
\begin{DoxyCompactList}\small\item\em The L\+\_\+p regularizer for arbitrary integer p. \end{DoxyCompactList}\item 
class \textbf{ L\+S\+TM}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{L\+S\+TM}{p.}{classmlpack_1_1ann_1_1LSTM} module class. \end{DoxyCompactList}\item 
class \textbf{ Margin\+Ranking\+Loss}
\begin{DoxyCompactList}\small\item\em Margin ranking loss measures the loss given inputs and a label vector with values of 1 or -\/1. \end{DoxyCompactList}\item 
class \textbf{ Max\+Pooling}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Max\+Pooling}{p.}{classmlpack_1_1ann_1_1MaxPooling} layer. \end{DoxyCompactList}\item 
class \textbf{ Max\+Pooling\+Rule}
\item 
class \textbf{ Mean\+Bias\+Error}
\begin{DoxyCompactList}\small\item\em The mean bias error performance function measures the network\textquotesingle{}s performance according to the mean of errors. \end{DoxyCompactList}\item 
class \textbf{ Mean\+Pooling}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Mean\+Pooling}{p.}{classmlpack_1_1ann_1_1MeanPooling}. \end{DoxyCompactList}\item 
class \textbf{ Mean\+Pooling\+Rule}
\item 
class \textbf{ Mean\+Squared\+Error}
\begin{DoxyCompactList}\small\item\em The mean squared error performance function measures the network\textquotesingle{}s performance according to the mean of squared errors. \end{DoxyCompactList}\item 
class \textbf{ Mean\+Squared\+Logarithmic\+Error}
\begin{DoxyCompactList}\small\item\em The mean squared logarithmic error performance function measures the network\textquotesingle{}s performance according to the mean of squared logarithmic errors. \end{DoxyCompactList}\item 
class \textbf{ Mini\+Batch\+Discrimination}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Mini\+Batch\+Discrimination}{p.}{classmlpack_1_1ann_1_1MiniBatchDiscrimination} layer. \end{DoxyCompactList}\item 
class \textbf{ Multiply\+Constant}
\begin{DoxyCompactList}\small\item\em Implementation of the multiply constant layer. \end{DoxyCompactList}\item 
class \textbf{ Multiply\+Merge}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Multiply\+Merge}{p.}{classmlpack_1_1ann_1_1MultiplyMerge} module class. \end{DoxyCompactList}\item 
class \textbf{ Naive\+Convolution}
\begin{DoxyCompactList}\small\item\em Computes the two-\/dimensional convolution. \end{DoxyCompactList}\item 
class \textbf{ Negative\+Log\+Likelihood}
\begin{DoxyCompactList}\small\item\em Implementation of the negative log likelihood layer. \end{DoxyCompactList}\item 
class \textbf{ Network\+Initialization}
\begin{DoxyCompactList}\small\item\em This class is used to initialize the network with the given initialization rule. \end{DoxyCompactList}\item 
class \textbf{ Nguyen\+Widrow\+Initialization}
\begin{DoxyCompactList}\small\item\em This class is used to initialize the weight matrix with the Nguyen-\/\+Widrow method. \end{DoxyCompactList}\item 
class \textbf{ No\+Regularizer}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{No\+Regularizer}{p.}{classmlpack_1_1ann_1_1NoRegularizer}. \end{DoxyCompactList}\item 
class \textbf{ Oivs\+Initialization}
\begin{DoxyCompactList}\small\item\em This class is used to initialize the weight matrix with the oivs method. \end{DoxyCompactList}\item 
class \textbf{ Orthogonal\+Initialization}
\begin{DoxyCompactList}\small\item\em This class is used to initialize the weight matrix with the orthogonal matrix initialization. \end{DoxyCompactList}\item 
class \textbf{ Orthogonal\+Regularizer}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Orthogonal\+Regularizer}{p.}{classmlpack_1_1ann_1_1OrthogonalRegularizer}. \end{DoxyCompactList}\item 
class \textbf{ Output\+Height\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Output\+Height\+Visitor}{p.}{classmlpack_1_1ann_1_1OutputHeightVisitor} exposes the Output\+Height() method of the given module. \end{DoxyCompactList}\item 
class \textbf{ Output\+Parameter\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Output\+Parameter\+Visitor}{p.}{classmlpack_1_1ann_1_1OutputParameterVisitor} exposes the output parameter of the given module. \end{DoxyCompactList}\item 
class \textbf{ Output\+Width\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Output\+Width\+Visitor}{p.}{classmlpack_1_1ann_1_1OutputWidthVisitor} exposes the Output\+Width() method of the given module. \end{DoxyCompactList}\item 
class \textbf{ Padding}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Padding}{p.}{classmlpack_1_1ann_1_1Padding} module class. \end{DoxyCompactList}\item 
class \textbf{ Parameters\+Set\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Parameters\+Set\+Visitor}{p.}{classmlpack_1_1ann_1_1ParametersSetVisitor} updates the parameters set using the given matrix. \end{DoxyCompactList}\item 
class \textbf{ Parameters\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Parameters\+Visitor}{p.}{classmlpack_1_1ann_1_1ParametersVisitor} exposes the parameters set of the given module and stores the parameters set into the given matrix. \end{DoxyCompactList}\item 
class \textbf{ P\+Re\+LU}
\begin{DoxyCompactList}\small\item\em The \doxyref{P\+Re\+LU}{p.}{classmlpack_1_1ann_1_1PReLU} activation function, defined by (where alpha is trainable) \end{DoxyCompactList}\item 
class \textbf{ Random\+Initialization}
\begin{DoxyCompactList}\small\item\em This class is used to initialize randomly the weight matrix. \end{DoxyCompactList}\item 
class \textbf{ R\+BM}
\begin{DoxyCompactList}\small\item\em The implementation of the \doxyref{R\+BM}{p.}{classmlpack_1_1ann_1_1RBM} module. \end{DoxyCompactList}\item 
class \textbf{ Reconstruction\+Loss}
\begin{DoxyCompactList}\small\item\em The reconstruction loss performance function measures the network\textquotesingle{}s performance equal to the negative log probability of the target with the input distribution. \end{DoxyCompactList}\item 
class \textbf{ Rectifier\+Function}
\begin{DoxyCompactList}\small\item\em The rectifier function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Recurrent}
\begin{DoxyCompactList}\small\item\em Implementation of the Recurrent\+Layer class. \end{DoxyCompactList}\item 
class \textbf{ Recurrent\+Attention}
\begin{DoxyCompactList}\small\item\em This class implements the \doxyref{Recurrent}{p.}{classmlpack_1_1ann_1_1Recurrent} Model for Visual Attention, using a variety of possible layer implementations. \end{DoxyCompactList}\item 
class \textbf{ Reinforce\+Normal}
\begin{DoxyCompactList}\small\item\em Implementation of the reinforce normal layer. \end{DoxyCompactList}\item 
class \textbf{ Reparametrization}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Reparametrization}{p.}{classmlpack_1_1ann_1_1Reparametrization} layer class. \end{DoxyCompactList}\item 
class \textbf{ Reset\+Cell\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Reset\+Cell\+Visitor}{p.}{classmlpack_1_1ann_1_1ResetCellVisitor} executes the Reset\+Cell() function. \end{DoxyCompactList}\item 
class \textbf{ Reset\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Reset\+Visitor}{p.}{classmlpack_1_1ann_1_1ResetVisitor} executes the Reset() function. \end{DoxyCompactList}\item 
class \textbf{ Reward\+Set\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Reward\+Set\+Visitor}{p.}{classmlpack_1_1ann_1_1RewardSetVisitor} sets the reward parameter given the reward value. \end{DoxyCompactList}\item 
class \textbf{ R\+NN}
\begin{DoxyCompactList}\small\item\em Implementation of a standard recurrent neural network container. \end{DoxyCompactList}\item 
class \textbf{ Run\+Set\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Run\+Set\+Visitor}{p.}{classmlpack_1_1ann_1_1RunSetVisitor} sets the run parameter given the run value. \end{DoxyCompactList}\item 
class \textbf{ Save\+Output\+Parameter\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Save\+Output\+Parameter\+Visitor}{p.}{classmlpack_1_1ann_1_1SaveOutputParameterVisitor} saves the output parameter into the given parameter set. \end{DoxyCompactList}\item 
class \textbf{ Select}
\begin{DoxyCompactList}\small\item\em The select module selects the specified column from a given input matrix. \end{DoxyCompactList}\item 
class \textbf{ Sequential}
\begin{DoxyCompactList}\small\item\em Implementation of the \doxyref{Sequential}{p.}{classmlpack_1_1ann_1_1Sequential} class. \end{DoxyCompactList}\item 
class \textbf{ Set\+Input\+Height\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Set\+Input\+Height\+Visitor}{p.}{classmlpack_1_1ann_1_1SetInputHeightVisitor} updates the input height parameter with the given input height. \end{DoxyCompactList}\item 
class \textbf{ Set\+Input\+Width\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Set\+Input\+Width\+Visitor}{p.}{classmlpack_1_1ann_1_1SetInputWidthVisitor} updates the input width parameter with the given input width. \end{DoxyCompactList}\item 
class \textbf{ Sigmoid\+Cross\+Entropy\+Error}
\begin{DoxyCompactList}\small\item\em The \doxyref{Sigmoid\+Cross\+Entropy\+Error}{p.}{classmlpack_1_1ann_1_1SigmoidCrossEntropyError} performance function measures the network\textquotesingle{}s performance according to the cross-\/entropy function between the input and target distributions. \end{DoxyCompactList}\item 
class \textbf{ Softplus\+Function}
\begin{DoxyCompactList}\small\item\em The softplus function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Soft\+Shrink}
\begin{DoxyCompactList}\small\item\em Soft Shrink operator is defined as, \begin{eqnarray*} f(x) &=& \left\{ \begin{array}{lr} x - lambda & : x > lambda \\ x + lambda & : x < -lambda \\ 0 & : otherwise \end{array} \right. \\ f'(x) &=& \left\{ \begin{array}{lr} 1 & : x > lambda \\ 1 & : x < -lambda \\ 0 & : otherwise \end{array} \right. \end{eqnarray*}. \end{DoxyCompactList}\item 
class \textbf{ Softsign\+Function}
\begin{DoxyCompactList}\small\item\em The softsign function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Spike\+Slab\+R\+BM}
\begin{DoxyCompactList}\small\item\em For more information, see the following paper\+: \end{DoxyCompactList}\item 
class \textbf{ Standard\+G\+AN}
\begin{DoxyCompactList}\small\item\em For more information, see the following paper\+: \end{DoxyCompactList}\item 
class \textbf{ Subview}
\begin{DoxyCompactList}\small\item\em Implementation of the subview layer. \end{DoxyCompactList}\item 
class \textbf{ S\+V\+D\+Convolution}
\begin{DoxyCompactList}\small\item\em Computes the two-\/dimensional convolution using singular value decomposition. \end{DoxyCompactList}\item 
class \textbf{ Swish\+Function}
\begin{DoxyCompactList}\small\item\em The swish function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Tanh\+Function}
\begin{DoxyCompactList}\small\item\em The tanh function, defined by. \end{DoxyCompactList}\item 
class \textbf{ Transposed\+Convolution}
\begin{DoxyCompactList}\small\item\em Implementation of the Transposed \doxyref{Convolution}{p.}{classmlpack_1_1ann_1_1Convolution} class. \end{DoxyCompactList}\item 
class \textbf{ Valid\+Convolution}
\item 
class \textbf{ Virtual\+Batch\+Norm}
\begin{DoxyCompactList}\small\item\em Declaration of the \doxyref{Virtual\+Batch\+Norm}{p.}{classmlpack_1_1ann_1_1VirtualBatchNorm} layer class. \end{DoxyCompactList}\item 
class \textbf{ V\+R\+Class\+Reward}
\begin{DoxyCompactList}\small\item\em Implementation of the variance reduced classification reinforcement layer. \end{DoxyCompactList}\item 
class \textbf{ Weight\+Norm}
\begin{DoxyCompactList}\small\item\em Declaration of the \doxyref{Weight\+Norm}{p.}{classmlpack_1_1ann_1_1WeightNorm} layer class. \end{DoxyCompactList}\item 
class \textbf{ Weight\+Set\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Weight\+Set\+Visitor}{p.}{classmlpack_1_1ann_1_1WeightSetVisitor} updates the module parameters given the parameters set. \end{DoxyCompactList}\item 
class \textbf{ Weight\+Size\+Visitor}
\begin{DoxyCompactList}\small\item\em \doxyref{Weight\+Size\+Visitor}{p.}{classmlpack_1_1ann_1_1WeightSizeVisitor} returns the number of weights of the given module. \end{DoxyCompactList}\item 
class \textbf{ W\+G\+AN}
\begin{DoxyCompactList}\small\item\em For more information, see the following paper\+: \end{DoxyCompactList}\item 
class \textbf{ W\+G\+A\+N\+GP}
\begin{DoxyCompactList}\small\item\em For more information, see the following paper\+: \end{DoxyCompactList}\end{DoxyCompactItemize}
\subsection*{Typedefs}
\begin{DoxyCompactItemize}
\item 
{\footnotesize template$<$class Activation\+Function  = Logistic\+Function, typename Input\+Data\+Type  = arma\+::mat, typename Output\+Data\+Type  = arma\+::mat$>$ }\\using \textbf{ Custom\+Layer} = \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type $>$
\begin{DoxyCompactList}\small\item\em Standard Sigmoid layer. \end{DoxyCompactList}\item 
{\footnotesize template$<$class Activation\+Function  = Elish\+Function, typename Input\+Data\+Type  = arma\+::mat, typename Output\+Data\+Type  = arma\+::mat$>$ }\\using \textbf{ Elish\+Function\+Layer} = \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type $>$
\begin{DoxyCompactList}\small\item\em Standard E\+Li\+S\+H-\/\+Layer using the E\+Li\+SH activation function. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Mat\+Type  = arma\+::mat$>$ }\\using \textbf{ Embedding} = \textbf{ Lookup}$<$ Mat\+Type, Mat\+Type $>$
\item 
{\footnotesize template$<$class Activation\+Function  = G\+E\+L\+U\+Function, typename Input\+Data\+Type  = arma\+::mat, typename Output\+Data\+Type  = arma\+::mat$>$ }\\using \textbf{ G\+E\+L\+U\+Function\+Layer} = \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type $>$
\begin{DoxyCompactList}\small\item\em Standard G\+E\+L\+U-\/\+Layer using the G\+E\+LU activation function. \end{DoxyCompactList}\item 
using \textbf{ Glorot\+Initialization} = \textbf{ Glorot\+Initialization\+Type}$<$ false $>$
\begin{DoxyCompactList}\small\item\em Glorot\+Initialization uses uniform distribution. \end{DoxyCompactList}\item 
{\footnotesize template$<$class Activation\+Function  = Hard\+Sigmoid\+Function, typename Input\+Data\+Type  = arma\+::mat, typename Output\+Data\+Type  = arma\+::mat$>$ }\\using \textbf{ Hard\+Sigmoid\+Layer} = \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type $>$
\begin{DoxyCompactList}\small\item\em Standard Hard\+Sigmoid-\/\+Layer using the Hard\+Sigmoid activation function. \end{DoxyCompactList}\item 
{\footnotesize template$<$class Activation\+Function  = Identity\+Function, typename Input\+Data\+Type  = arma\+::mat, typename Output\+Data\+Type  = arma\+::mat$>$ }\\using \textbf{ Identity\+Layer} = \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type $>$
\begin{DoxyCompactList}\small\item\em Standard Identity-\/\+Layer using the identity activation function. \end{DoxyCompactList}\item 
typedef \textbf{ L\+Regularizer}$<$ 1 $>$ \textbf{ L1\+Regularizer}
\begin{DoxyCompactList}\small\item\em The L1 Regularizer. \end{DoxyCompactList}\item 
typedef \textbf{ L\+Regularizer}$<$ 2 $>$ \textbf{ L2\+Regularizer}
\begin{DoxyCompactList}\small\item\em The L2 Regularizer. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename... Custom\+Layers$>$ }\\using \textbf{ Layer\+Types} = boost\+::variant$<$ \textbf{ Add}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Add\+Merge}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Atrous\+Convolution}$<$ \textbf{ Naive\+Convolution}$<$ \textbf{ Valid\+Convolution} $>$, \textbf{ Naive\+Convolution}$<$ \textbf{ Full\+Convolution} $>$, \textbf{ Naive\+Convolution}$<$ \textbf{ Valid\+Convolution} $>$, arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Base\+Layer}$<$ \textbf{ Logistic\+Function}, arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Base\+Layer}$<$ \textbf{ Identity\+Function}, arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Base\+Layer}$<$ \textbf{ Tanh\+Function}, arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Base\+Layer}$<$ \textbf{ Rectifier\+Function}, arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Base\+Layer}$<$ \textbf{ Softplus\+Function}, arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Batch\+Norm}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Bilinear\+Interpolation}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Concat}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Concatenate}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Concat\+Performance}$<$ \textbf{ Negative\+Log\+Likelihood}$<$ arma\+::mat, arma\+::mat $>$, arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Constant}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Convolution}$<$ \textbf{ Naive\+Convolution}$<$ \textbf{ Valid\+Convolution} $>$, \textbf{ Naive\+Convolution}$<$ \textbf{ Full\+Convolution} $>$, \textbf{ Naive\+Convolution}$<$ \textbf{ Valid\+Convolution} $>$, arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Transposed\+Convolution}$<$ \textbf{ Naive\+Convolution}$<$ \textbf{ Valid\+Convolution} $>$, \textbf{ Naive\+Convolution}$<$ \textbf{ Valid\+Convolution} $>$, \textbf{ Naive\+Convolution}$<$ \textbf{ Valid\+Convolution} $>$, arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Drop\+Connect}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, 
\textbf{ Dropout}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Alpha\+Dropout}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ E\+LU}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Flexible\+Re\+LU}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Glimpse}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Hard\+TanH}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Highway}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Join}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Layer\+Norm}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Leaky\+Re\+LU}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ C\+Re\+LU}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Linear}$<$ arma\+::mat, arma\+::mat, \textbf{ No\+Regularizer} $>$ $\ast$, \textbf{ Linear\+No\+Bias}$<$ arma\+::mat, arma\+::mat, \textbf{ No\+Regularizer} $>$ $\ast$, \textbf{ Log\+Soft\+Max}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Lookup}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ L\+S\+TM}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ G\+RU}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Fast\+L\+S\+TM}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Max\+Pooling}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Mean\+Pooling}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Mini\+Batch\+Discrimination}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Multiply\+Constant}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Multiply\+Merge}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Negative\+Log\+Likelihood}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Padding}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ P\+Re\+LU}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Weight\+Norm}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ C\+E\+LU}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ More\+Types}, Custom\+Layers $\ast$... $>$
\item 
{\footnotesize template$<$class Activation\+Function  = Li\+S\+H\+T\+Function, typename Input\+Data\+Type  = arma\+::mat, typename Output\+Data\+Type  = arma\+::mat$>$ }\\using \textbf{ Li\+S\+H\+T\+Function\+Layer} = \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type $>$
\begin{DoxyCompactList}\small\item\em Standard Li\+S\+H\+T-\/\+Layer using the Li\+S\+HT activation function. \end{DoxyCompactList}\item 
{\footnotesize template$<$class Activation\+Function  = Mish\+Function, typename Input\+Data\+Type  = arma\+::mat, typename Output\+Data\+Type  = arma\+::mat$>$ }\\using \textbf{ Mish\+Function\+Layer} = \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type $>$
\begin{DoxyCompactList}\small\item\em Standard Mish-\/\+Layer using the Mish activation function. \end{DoxyCompactList}\item 
using \textbf{ More\+Types} = boost\+::variant$<$ \textbf{ Recurrent}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Recurrent\+Attention}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Reinforce\+Normal}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Reparametrization}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Select}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Sequential}$<$ arma\+::mat, arma\+::mat, false $>$ $\ast$, \textbf{ Sequential}$<$ arma\+::mat, arma\+::mat, true $>$ $\ast$, \textbf{ Subview}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ V\+R\+Class\+Reward}$<$ arma\+::mat, arma\+::mat $>$ $\ast$, \textbf{ Virtual\+Batch\+Norm}$<$ arma\+::mat, arma\+::mat $>$ $\ast$$>$
\item 
{\footnotesize template$<$class Activation\+Function  = Rectifier\+Function, typename Input\+Data\+Type  = arma\+::mat, typename Output\+Data\+Type  = arma\+::mat$>$ }\\using \textbf{ Re\+L\+U\+Layer} = \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type $>$
\begin{DoxyCompactList}\small\item\em Standard rectified linear unit non-\/linearity layer. \end{DoxyCompactList}\item 
{\footnotesize template$<$typename Input\+Data\+Type  = arma\+::mat, typename Output\+Data\+Type  = arma\+::mat, typename... Custom\+Layers$>$ }\\using \textbf{ Residual} = \textbf{ Sequential}$<$ Input\+Data\+Type, Output\+Data\+Type, true, Custom\+Layers... $>$
\item 
using \textbf{ S\+E\+LU} = \textbf{ E\+LU}$<$ arma\+::mat, arma\+::mat $>$
\item 
{\footnotesize template$<$class Activation\+Function  = Logistic\+Function, typename Input\+Data\+Type  = arma\+::mat, typename Output\+Data\+Type  = arma\+::mat$>$ }\\using \textbf{ Sigmoid\+Layer} = \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type $>$
\begin{DoxyCompactList}\small\item\em Standard Sigmoid-\/\+Layer using the logistic activation function. \end{DoxyCompactList}\item 
{\footnotesize template$<$class Activation\+Function  = Softplus\+Function, typename Input\+Data\+Type  = arma\+::mat, typename Output\+Data\+Type  = arma\+::mat$>$ }\\using \textbf{ Soft\+Plus\+Layer} = \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type $>$
\begin{DoxyCompactList}\small\item\em Standard Softplus-\/\+Layer using the Softplus activation function. \end{DoxyCompactList}\item 
{\footnotesize template$<$class Activation\+Function  = Swish\+Function, typename Input\+Data\+Type  = arma\+::mat, typename Output\+Data\+Type  = arma\+::mat$>$ }\\using \textbf{ Swish\+Function\+Layer} = \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type $>$
\begin{DoxyCompactList}\small\item\em Standard Swish-\/\+Layer using the Swish activation function. \end{DoxyCompactList}\item 
{\footnotesize template$<$class Activation\+Function  = Tanh\+Function, typename Input\+Data\+Type  = arma\+::mat, typename Output\+Data\+Type  = arma\+::mat$>$ }\\using \textbf{ Tan\+H\+Layer} = \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type $>$
\begin{DoxyCompactList}\small\item\em Standard hyperbolic tangent layer. \end{DoxyCompactList}\item 
using \textbf{ Xavier\+Initialization} = \textbf{ Glorot\+Initialization\+Type}$<$ true $>$
\begin{DoxyCompactList}\small\item\em Xavier\+Initialization is the popular name for this method. \end{DoxyCompactList}\end{DoxyCompactItemize}
\subsection*{Functions}
\begin{DoxyCompactItemize}
\item 
\textbf{ H\+A\+S\+\_\+\+A\+N\+Y\+\_\+\+M\+E\+T\+H\+O\+D\+\_\+\+F\+O\+RM} (Model, Has\+Model\+Check)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Gradient, Has\+Gradient\+Check)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Deterministic, Has\+Deterministic\+Check)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Parameters, Has\+Parameters\+Check)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (\textbf{ Add}, Has\+Add\+Check)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Location, Has\+Location\+Check)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Reset, Has\+Reset\+Check)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Reset\+Cell, Has\+Reset\+Cell\+Check)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Reward, Has\+Reward\+Check)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Input\+Width, Has\+Input\+Width)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Input\+Height, Has\+Input\+Height)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Rho, Has\+Rho)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Loss, Has\+Loss)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Run, Has\+Run\+Check)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Bias, Has\+Bias\+Check)
\item 
\textbf{ H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC} (Max\+Iterations, Has\+Max\+Iterations)
\item 
{\footnotesize template$<$typename Model\+Type $>$ }\\double \textbf{ Inception\+Score} (Model\+Type Model, arma\+::mat images, size\+\_\+t splits=1)
\begin{DoxyCompactList}\small\item\em Function that computes Inception Score for a set of images produced by a \doxyref{G\+AN}{p.}{classmlpack_1_1ann_1_1GAN}. \end{DoxyCompactList}\end{DoxyCompactItemize}


\subsection{Detailed Description}
Artificial Neural Network. 

Artificial Neural Network.

\subsection{Typedef Documentation}
\mbox{\label{namespacemlpack_1_1ann_ab8ac0d1eb11983be1bc7419ce15e91bf}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Custom\+Layer@{Custom\+Layer}}
\index{Custom\+Layer@{Custom\+Layer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Custom\+Layer}
{\footnotesize\ttfamily using \textbf{ Custom\+Layer} =  \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type$>$}



Standard Sigmoid layer. 



Definition at line 31 of file custom\+\_\+layer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_aba712c4fa3b49cf06a01ef6867b958fb}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Elish\+Function\+Layer@{Elish\+Function\+Layer}}
\index{Elish\+Function\+Layer@{Elish\+Function\+Layer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Elish\+Function\+Layer}
{\footnotesize\ttfamily using \textbf{ Elish\+Function\+Layer} =  \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type$>$}



Standard E\+Li\+S\+H-\/\+Layer using the E\+Li\+SH activation function. 



Definition at line 252 of file base\+\_\+layer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_aaf3ea313e70c222598e17bf4e23dd451}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Embedding@{Embedding}}
\index{Embedding@{Embedding}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Embedding}
{\footnotesize\ttfamily using \textbf{ Embedding} =  \textbf{ Lookup}$<$Mat\+Type, Mat\+Type$>$}



Definition at line 131 of file lookup.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_a69253ae519ed598c1bf8b5e3368f6ba4}} 
\index{mlpack\+::ann@{mlpack\+::ann}!G\+E\+L\+U\+Function\+Layer@{G\+E\+L\+U\+Function\+Layer}}
\index{G\+E\+L\+U\+Function\+Layer@{G\+E\+L\+U\+Function\+Layer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{G\+E\+L\+U\+Function\+Layer}
{\footnotesize\ttfamily using \textbf{ G\+E\+L\+U\+Function\+Layer} =  \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type$>$}



Standard G\+E\+L\+U-\/\+Layer using the G\+E\+LU activation function. 



Definition at line 241 of file base\+\_\+layer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_a003150a66fa8a2cc2a3650e2384a1dfc}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Glorot\+Initialization@{Glorot\+Initialization}}
\index{Glorot\+Initialization@{Glorot\+Initialization}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Glorot\+Initialization}
{\footnotesize\ttfamily using \textbf{ Glorot\+Initialization} =  \textbf{ Glorot\+Initialization\+Type}$<$false$>$}



Glorot\+Initialization uses uniform distribution. 



Definition at line 148 of file glorot\+\_\+init.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_ae5bb0281a40f808dda254ea8d16d6acf}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Hard\+Sigmoid\+Layer@{Hard\+Sigmoid\+Layer}}
\index{Hard\+Sigmoid\+Layer@{Hard\+Sigmoid\+Layer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Hard\+Sigmoid\+Layer}
{\footnotesize\ttfamily using \textbf{ Hard\+Sigmoid\+Layer} =  \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type$>$}



Standard Hard\+Sigmoid-\/\+Layer using the Hard\+Sigmoid activation function. 



Definition at line 197 of file base\+\_\+layer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_a41e9b25c5b33b7de07a0eac6c46dc085}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Identity\+Layer@{Identity\+Layer}}
\index{Identity\+Layer@{Identity\+Layer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Identity\+Layer}
{\footnotesize\ttfamily using \textbf{ Identity\+Layer} =  \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type$>$}



Standard Identity-\/\+Layer using the identity activation function. 



Definition at line 153 of file base\+\_\+layer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_a137169a12d6f400a718d7383f3365112}} 
\index{mlpack\+::ann@{mlpack\+::ann}!L1\+Regularizer@{L1\+Regularizer}}
\index{L1\+Regularizer@{L1\+Regularizer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{L1\+Regularizer}
{\footnotesize\ttfamily typedef \textbf{ L\+Regularizer}$<$1$>$ \textbf{ L1\+Regularizer}}



The L1 Regularizer. 



Definition at line 62 of file lregularizer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_ac3b4459576bd0564e145e049ee1549ce}} 
\index{mlpack\+::ann@{mlpack\+::ann}!L2\+Regularizer@{L2\+Regularizer}}
\index{L2\+Regularizer@{L2\+Regularizer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{L2\+Regularizer}
{\footnotesize\ttfamily typedef \textbf{ L\+Regularizer}$<$2$>$ \textbf{ L2\+Regularizer}}



The L2 Regularizer. 



Definition at line 67 of file lregularizer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_ae444f705ce04c1cafee6c73b0383bb22}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Layer\+Types@{Layer\+Types}}
\index{Layer\+Types@{Layer\+Types}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Layer\+Types}
{\footnotesize\ttfamily using \textbf{ Layer\+Types} =  boost\+::variant$<$ \textbf{ Add}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Add\+Merge}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Atrous\+Convolution}$<$\textbf{ Naive\+Convolution}$<$\textbf{ Valid\+Convolution}$>$, \textbf{ Naive\+Convolution}$<$\textbf{ Full\+Convolution}$>$, \textbf{ Naive\+Convolution}$<$\textbf{ Valid\+Convolution}$>$, arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Base\+Layer}$<$\textbf{ Logistic\+Function}, arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Base\+Layer}$<$\textbf{ Identity\+Function}, arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Base\+Layer}$<$\textbf{ Tanh\+Function}, arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Base\+Layer}$<$\textbf{ Rectifier\+Function}, arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Base\+Layer}$<$\textbf{ Softplus\+Function}, arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Batch\+Norm}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Bilinear\+Interpolation}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Concat}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Concatenate}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Concat\+Performance}$<$\textbf{ Negative\+Log\+Likelihood}$<$arma\+::mat, arma\+::mat$>$, arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Constant}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Convolution}$<$\textbf{ Naive\+Convolution}$<$\textbf{ Valid\+Convolution}$>$, \textbf{ Naive\+Convolution}$<$\textbf{ Full\+Convolution}$>$, \textbf{ Naive\+Convolution}$<$\textbf{ Valid\+Convolution}$>$, arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Transposed\+Convolution}$<$\textbf{ Naive\+Convolution}$<$\textbf{ Valid\+Convolution}$>$, \textbf{ Naive\+Convolution}$<$\textbf{ Valid\+Convolution}$>$, \textbf{ Naive\+Convolution}$<$\textbf{ Valid\+Convolution}$>$, arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Drop\+Connect}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Dropout}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Alpha\+Dropout}$<$arma\+::mat, 
arma\+::mat$>$$\ast$, \textbf{ E\+LU}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Flexible\+Re\+LU}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Glimpse}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Hard\+TanH}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Highway}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Join}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Layer\+Norm}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Leaky\+Re\+LU}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ C\+Re\+LU}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Linear}$<$arma\+::mat, arma\+::mat, \textbf{ No\+Regularizer}$>$$\ast$, \textbf{ Linear\+No\+Bias}$<$arma\+::mat, arma\+::mat, \textbf{ No\+Regularizer}$>$$\ast$, \textbf{ Log\+Soft\+Max}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Lookup}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ L\+S\+TM}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ G\+RU}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Fast\+L\+S\+TM}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Max\+Pooling}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Mean\+Pooling}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Mini\+Batch\+Discrimination}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Multiply\+Constant}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Multiply\+Merge}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Negative\+Log\+Likelihood}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Padding}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ P\+Re\+LU}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Weight\+Norm}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ C\+E\+LU}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ More\+Types}, Custom\+Layers$\ast$... $>$}



Definition at line 252 of file layer\+\_\+types.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_aebe38e4259931f33c44701ba75d6240d}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Li\+S\+H\+T\+Function\+Layer@{Li\+S\+H\+T\+Function\+Layer}}
\index{Li\+S\+H\+T\+Function\+Layer@{Li\+S\+H\+T\+Function\+Layer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Li\+S\+H\+T\+Function\+Layer}
{\footnotesize\ttfamily using \textbf{ Li\+S\+H\+T\+Function\+Layer} =  \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type$>$}



Standard Li\+S\+H\+T-\/\+Layer using the Li\+S\+HT activation function. 



Definition at line 230 of file base\+\_\+layer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_a52aa33cac06fc8dbf5eefcb4e1858fea}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Mish\+Function\+Layer@{Mish\+Function\+Layer}}
\index{Mish\+Function\+Layer@{Mish\+Function\+Layer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Mish\+Function\+Layer}
{\footnotesize\ttfamily using \textbf{ Mish\+Function\+Layer} =  \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type$>$}



Standard Mish-\/\+Layer using the Mish activation function. 



Definition at line 219 of file base\+\_\+layer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_aefb02cdbf2ab803a40a1f507e9a78b19}} 
\index{mlpack\+::ann@{mlpack\+::ann}!More\+Types@{More\+Types}}
\index{More\+Types@{More\+Types}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{More\+Types}
{\footnotesize\ttfamily using \textbf{ More\+Types} =  boost\+::variant$<$ \textbf{ Recurrent}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Recurrent\+Attention}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Reinforce\+Normal}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Reparametrization}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Select}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Sequential}$<$arma\+::mat, arma\+::mat, false$>$$\ast$, \textbf{ Sequential}$<$arma\+::mat, arma\+::mat, true$>$$\ast$, \textbf{ Subview}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ V\+R\+Class\+Reward}$<$arma\+::mat, arma\+::mat$>$$\ast$, \textbf{ Virtual\+Batch\+Norm}$<$arma\+::mat, arma\+::mat$>$$\ast$ $>$}



Definition at line 193 of file layer\+\_\+types.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_a06b6e29ab52ee62d0bccbf108d64d1a2}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Re\+L\+U\+Layer@{Re\+L\+U\+Layer}}
\index{Re\+L\+U\+Layer@{Re\+L\+U\+Layer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Re\+L\+U\+Layer}
{\footnotesize\ttfamily using \textbf{ Re\+L\+U\+Layer} =  \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type$>$}



Standard rectified linear unit non-\/linearity layer. 



Definition at line 164 of file base\+\_\+layer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_ac4f089366ec4066e82d7c4ecae664a46}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Residual@{Residual}}
\index{Residual@{Residual}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Residual}
{\footnotesize\ttfamily using \textbf{ Residual} =  \textbf{ Sequential}$<$ Input\+Data\+Type, Output\+Data\+Type, true, Custom\+Layers...$>$}



Definition at line 248 of file sequential.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_ac08f9682be904369ec09e68b43b09fad}} 
\index{mlpack\+::ann@{mlpack\+::ann}!S\+E\+LU@{S\+E\+LU}}
\index{S\+E\+LU@{S\+E\+LU}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{S\+E\+LU}
{\footnotesize\ttfamily using \textbf{ S\+E\+LU} =  \textbf{ E\+LU}$<$arma\+::mat, arma\+::mat$>$}



Definition at line 207 of file elu.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_ad4f35bf0f4f5e2750668e17c2d07a27b}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Sigmoid\+Layer@{Sigmoid\+Layer}}
\index{Sigmoid\+Layer@{Sigmoid\+Layer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Sigmoid\+Layer}
{\footnotesize\ttfamily using \textbf{ Sigmoid\+Layer} =  \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type$>$}



Standard Sigmoid-\/\+Layer using the logistic activation function. 



Definition at line 142 of file base\+\_\+layer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_a3b84f714a815d838e34c11a59480cd1c}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Soft\+Plus\+Layer@{Soft\+Plus\+Layer}}
\index{Soft\+Plus\+Layer@{Soft\+Plus\+Layer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Soft\+Plus\+Layer}
{\footnotesize\ttfamily using \textbf{ Soft\+Plus\+Layer} =  \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type$>$}



Standard Softplus-\/\+Layer using the Softplus activation function. 



Definition at line 186 of file base\+\_\+layer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_a8ea44af9d438cf5fc4098a0edc9e74a4}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Swish\+Function\+Layer@{Swish\+Function\+Layer}}
\index{Swish\+Function\+Layer@{Swish\+Function\+Layer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Swish\+Function\+Layer}
{\footnotesize\ttfamily using \textbf{ Swish\+Function\+Layer} =  \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type$>$}



Standard Swish-\/\+Layer using the Swish activation function. 



Definition at line 208 of file base\+\_\+layer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_acc8e7f6b94728a4a6eb310677b5bc532}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Tan\+H\+Layer@{Tan\+H\+Layer}}
\index{Tan\+H\+Layer@{Tan\+H\+Layer}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Tan\+H\+Layer}
{\footnotesize\ttfamily using \textbf{ Tan\+H\+Layer} =  \textbf{ Base\+Layer}$<$ Activation\+Function, Input\+Data\+Type, Output\+Data\+Type$>$}



Standard hyperbolic tangent layer. 



Definition at line 175 of file base\+\_\+layer.\+hpp.

\mbox{\label{namespacemlpack_1_1ann_a4f99a527ad82262756bb7e3785e8201a}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Xavier\+Initialization@{Xavier\+Initialization}}
\index{Xavier\+Initialization@{Xavier\+Initialization}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Xavier\+Initialization}
{\footnotesize\ttfamily using \textbf{ Xavier\+Initialization} =  \textbf{ Glorot\+Initialization\+Type}$<$true$>$}



Xavier\+Initialization is the popular name for this method. 



Definition at line 143 of file glorot\+\_\+init.\+hpp.



\subsection{Function Documentation}
\mbox{\label{namespacemlpack_1_1ann_a5aeaf3e16247ebd569074c32cab63c70}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+A\+N\+Y\+\_\+\+M\+E\+T\+H\+O\+D\+\_\+\+F\+O\+RM@{H\+A\+S\+\_\+\+A\+N\+Y\+\_\+\+M\+E\+T\+H\+O\+D\+\_\+\+F\+O\+RM}}
\index{H\+A\+S\+\_\+\+A\+N\+Y\+\_\+\+M\+E\+T\+H\+O\+D\+\_\+\+F\+O\+RM@{H\+A\+S\+\_\+\+A\+N\+Y\+\_\+\+M\+E\+T\+H\+O\+D\+\_\+\+F\+O\+RM}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+A\+N\+Y\+\_\+\+M\+E\+T\+H\+O\+D\+\_\+\+F\+O\+R\+M()}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+A\+N\+Y\+\_\+\+M\+E\+T\+H\+O\+D\+\_\+\+F\+O\+RM (\begin{DoxyParamCaption}\item[{Model}]{,  }\item[{Has\+Model\+Check}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_ac1b6745deedbcee048f2387da27389d4}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [1/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Gradient}]{,  }\item[{Has\+Gradient\+Check}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_ae95d86bb222cc89639472577da586357}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [2/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Deterministic}]{,  }\item[{Has\+Deterministic\+Check}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_add5ad48dbc07b098c8df806a7d100de7}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [3/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Parameters}]{,  }\item[{Has\+Parameters\+Check}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_a923497f92d9b28cfe7143d40e00c6bfc}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [4/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{\textbf{ Add}}]{,  }\item[{Has\+Add\+Check}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_a9ddaef84cd236998b57624b9b4d2eebe}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [5/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Location}]{,  }\item[{Has\+Location\+Check}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_a7af914cacab417f183e2fc0051a5345a}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [6/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Reset}]{,  }\item[{Has\+Reset\+Check}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_a5278fc5426da6ac56df6540dabd508e8}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [7/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Reset\+Cell}]{,  }\item[{Has\+Reset\+Cell\+Check}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_ad4b16a6a10d1b1d3999d177f03b1f4a0}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [8/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Reward}]{,  }\item[{Has\+Reward\+Check}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_a4dfcd41ff0d3c6ea37dda6c9a35c832f}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [9/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Input\+Width}]{,  }\item[{Has\+Input\+Width}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_addfd94f5ac2aa2225484ddddb06b8320}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [10/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Input\+Height}]{,  }\item[{Has\+Input\+Height}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_a1e25664538ca94074ff5636b85902c8a}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [11/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Rho}]{,  }\item[{Has\+Rho}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_af302c82cfb8bb5c0871c8a876e70adcc}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [12/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Loss}]{,  }\item[{Has\+Loss}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_af4586b834d3c6bd15695550314404738}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [13/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Run}]{,  }\item[{Has\+Run\+Check}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_aa339cfab0c5987cfec78736f19e50373}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [14/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Bias}]{,  }\item[{Has\+Bias\+Check}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_a8e7950714181dc8adf55752f45467dd8}} 
\index{mlpack\+::ann@{mlpack\+::ann}!H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}}
\index{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC@{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+N\+C()\hspace{0.1cm}{\footnotesize\ttfamily [15/15]}}
{\footnotesize\ttfamily mlpack\+::ann\+::\+H\+A\+S\+\_\+\+M\+E\+M\+\_\+\+F\+U\+NC (\begin{DoxyParamCaption}\item[{Max\+Iterations}]{,  }\item[{Has\+Max\+Iterations}]{ }\end{DoxyParamCaption})}

\mbox{\label{namespacemlpack_1_1ann_ad1c987f983baef10e712bde7a3f36c98}} 
\index{mlpack\+::ann@{mlpack\+::ann}!Inception\+Score@{Inception\+Score}}
\index{Inception\+Score@{Inception\+Score}!mlpack\+::ann@{mlpack\+::ann}}
\subsubsection{Inception\+Score()}
{\footnotesize\ttfamily double mlpack\+::ann\+::\+Inception\+Score (\begin{DoxyParamCaption}\item[{Model\+Type}]{Model,  }\item[{arma\+::mat}]{images,  }\item[{size\+\_\+t}]{splits = {\ttfamily 1} }\end{DoxyParamCaption})}



Function that computes Inception Score for a set of images produced by a \doxyref{G\+AN}{p.}{classmlpack_1_1ann_1_1GAN}. 

For more information, see the following.


\begin{DoxyCode}
@article\{Goodfellow2016,
  author  = \{Tim Salimans, Ian Goodfellow, Wojciech Zaremba, Vicki Cheung,
             Alec Radford, Xi Chen\},
  title   = \{Improved Techniques \textcolor{keywordflow}{for} Training GANs\},
  year    = \{2016\},
  url     = \{https:\textcolor{comment}{//arxiv.org/abs/1606.03498\},}
\}
\end{DoxyCode}



\begin{DoxyParams}{Parameters}
{\em Model} & Model for evaluating the quality of images. \\
\hline
{\em images} & Images generated by \doxyref{G\+AN}{p.}{classmlpack_1_1ann_1_1GAN}. \\
\hline
\end{DoxyParams}
