\section{/var/www/mlpack.ratml.\+org/mlpack.org/\+\_\+src/mlpack-\/git/src/mlpack/methods/ann/layer/multihead\+\_\+attention.hpp File Reference}
\label{multihead__attention_8hpp}\index{/var/www/mlpack.\+ratml.\+org/mlpack.\+org/\+\_\+src/mlpack-\/git/src/mlpack/methods/ann/layer/multihead\+\_\+attention.\+hpp@{/var/www/mlpack.\+ratml.\+org/mlpack.\+org/\+\_\+src/mlpack-\/git/src/mlpack/methods/ann/layer/multihead\+\_\+attention.\+hpp}}
Include dependency graph for multihead\+\_\+attention.\+hpp\+:
\nopagebreak
\begin{figure}[H]
\begin{center}
\leavevmode
\includegraphics[width=350pt]{multihead__attention_8hpp__incl}
\end{center}
\end{figure}
This graph shows which files directly or indirectly include this file\+:
\nopagebreak
\begin{figure}[H]
\begin{center}
\leavevmode
\includegraphics[width=350pt]{multihead__attention_8hpp__dep__incl}
\end{center}
\end{figure}
\subsection*{Classes}
\begin{DoxyCompactItemize}
\item 
class \textbf{ Multihead\+Attention$<$ Input\+Data\+Type, Output\+Data\+Type, Regularizer\+Type $>$}
\begin{DoxyCompactList}\small\item\em Multihead Attention allows the model to jointly attend to information from different representation subspaces at different positions. \end{DoxyCompactList}\end{DoxyCompactItemize}
\subsection*{Namespaces}
\begin{DoxyCompactItemize}
\item 
 \textbf{ mlpack}
\begin{DoxyCompactList}\small\item\em Linear algebra utility functions, generally performed on matrices or vectors. \end{DoxyCompactList}\item 
 \textbf{ mlpack\+::ann}
\begin{DoxyCompactList}\small\item\em Artificial Neural Network. \end{DoxyCompactList}\end{DoxyCompactItemize}


\subsection{Detailed Description}
\begin{DoxyAuthor}{Author}
Mrityunjay Tripathi
\end{DoxyAuthor}
Definition of the Multihead\+Attention class.


\begin{DoxyCode}
@article\{NIPS'17,
  author  = \{Ashish Vaswani, Llion Jones, Noam Shazeer, Niki Parmar,
             Aidan N. Gomez, Jakob Uszkoreit, Łukasz Kaiser,
             Illia Polosukhin\},
  title   = \{Attention Is All You Need\},
  year    = \{2017\},
  url     = \{http://arxiv.org/abs/1706.03762v5\}
\}
\end{DoxyCode}


mlpack is free software; you may redistribute it and/or modify it under the terms of the 3-\/clause B\+SD license. You should have received a copy of the 3-\/clause B\+SD license along with mlpack. If not, see \texttt{http\+://www.\+opensource.\+org/licenses/\+B\+S\+D-\/3-\/\+Clause} for more information. 