\hypertarget{class_tokenizer}{}\section{Tokenizer Class Reference}
\label{class_tokenizer}\index{Tokenizer@{Tokenizer}}


{\ttfamily \#include $<$tokenizer.\+hpp$>$}

\subsection*{Public Member Functions}
\begin{DoxyCompactItemize}
\item 
\hyperlink{class_tokenizer_a2a6c04ea8c784f66bebcb6df7073769c}{Tokenizer} ()
\item 
\hyperlink{class_tokenizer_a6edc9ba4af94d2aa55f48a83c903800f}{Tokenizer} (const std\+::vector$<$ std\+::string $>$ \&lines)
\item 
void \hyperlink{class_tokenizer_a8bd8a4eb5df764f6128028daa0e9044b}{split} (const std\+::vector$<$ std\+::string $>$ \&lines)
\item 
void \hyperlink{class_tokenizer_ae928efe72c00908a3529747b4cfd01d5}{parse} (const std\+::list$<$ std\+::string $>$ \&\hyperlink{class_tokenizer_a89707ad3a758fc9ec58f00d92d5fc622}{raw\+Tokens})
\end{DoxyCompactItemize}
\subsection*{Public Attributes}
\begin{DoxyCompactItemize}
\item 
std\+::list$<$ std\+::string $>$ \hyperlink{class_tokenizer_a89707ad3a758fc9ec58f00d92d5fc622}{raw\+Tokens}
\item 
std\+::list$<$ \hyperlink{struct_token}{Token} $>$ \hyperlink{class_tokenizer_ae547093dbd03b3e70373147e4669d9fa}{tokens}
\item 
bool \hyperlink{class_tokenizer_a330a4cce0cbf3ebfbe601d97022d1ed4}{complete}
\end{DoxyCompactItemize}


\subsection{Detailed Description}

Tokenizer for line-based input. Its public interface takes a vector of input lines (via the converting constructor or \hyperlink{class_tokenizer_a8bd8a4eb5df764f6128028daa0e9044b}{split}) and produces a list of raw string tokens (\hyperlink{class_tokenizer_a89707ad3a758fc9ec58f00d92d5fc622}{raw\+Tokens}), which \hyperlink{class_tokenizer_ae928efe72c00908a3529747b4cfd01d5}{parse} converts into structured \hyperlink{struct_token}{Token} objects (\hyperlink{class_tokenizer_ae547093dbd03b3e70373147e4669d9fa}{tokens}). A public \hyperlink{class_tokenizer_a330a4cce0cbf3ebfbe601d97022d1ed4}{complete} flag is also exposed (presumably indicating whether tokenization finished -- confirm against the implementation).

Definition at line 7 of file tokenizer.\+hpp.



\subsection{Constructor \& Destructor Documentation}
\hypertarget{class_tokenizer_a2a6c04ea8c784f66bebcb6df7073769c}{}\index{Tokenizer@{Tokenizer}!Tokenizer@{Tokenizer}}
\index{Tokenizer@{Tokenizer}!Tokenizer@{Tokenizer}}
\subsubsection[{Tokenizer}]{\setlength{\rightskip}{0pt plus 5cm}Tokenizer\+::\+Tokenizer (
\begin{DoxyParamCaption}
{}
\end{DoxyParamCaption}
)}\label{class_tokenizer_a2a6c04ea8c784f66bebcb6df7073769c}


Definition at line 18 of file tokenizer.\+cpp.

\hypertarget{class_tokenizer_a6edc9ba4af94d2aa55f48a83c903800f}{}\index{Tokenizer@{Tokenizer}!Tokenizer@{Tokenizer}}
\index{Tokenizer@{Tokenizer}!Tokenizer@{Tokenizer}}
\subsubsection[{Tokenizer}]{\setlength{\rightskip}{0pt plus 5cm}Tokenizer\+::\+Tokenizer (
\begin{DoxyParamCaption}
\item[{const std\+::vector$<$ std\+::string $>$ \&}]{lines}
\end{DoxyParamCaption}
)}\label{class_tokenizer_a6edc9ba4af94d2aa55f48a83c903800f}


Definition at line 21 of file tokenizer.\+cpp.



\subsection{Member Function Documentation}
\hypertarget{class_tokenizer_ae928efe72c00908a3529747b4cfd01d5}{}\index{Tokenizer@{Tokenizer}!parse@{parse}}
\index{parse@{parse}!Tokenizer@{Tokenizer}}
\subsubsection[{parse}]{\setlength{\rightskip}{0pt plus 5cm}void Tokenizer\+::parse (
\begin{DoxyParamCaption}
\item[{const std\+::list$<$ std\+::string $>$ \&}]{raw\+Tokens}
\end{DoxyParamCaption}
)}\label{class_tokenizer_ae928efe72c00908a3529747b4cfd01d5}


Definition at line 104 of file tokenizer.\+cpp.

\hypertarget{class_tokenizer_a8bd8a4eb5df764f6128028daa0e9044b}{}\index{Tokenizer@{Tokenizer}!split@{split}}
\index{split@{split}!Tokenizer@{Tokenizer}}
\subsubsection[{split}]{\setlength{\rightskip}{0pt plus 5cm}void Tokenizer\+::split (
\begin{DoxyParamCaption}
\item[{const std\+::vector$<$ std\+::string $>$ \&}]{lines}
\end{DoxyParamCaption}
)}\label{class_tokenizer_a8bd8a4eb5df764f6128028daa0e9044b}


Definition at line 27 of file tokenizer.\+cpp.



\subsection{Member Data Documentation}
\hypertarget{class_tokenizer_a330a4cce0cbf3ebfbe601d97022d1ed4}{}\index{Tokenizer@{Tokenizer}!complete@{complete}}
\index{complete@{complete}!Tokenizer@{Tokenizer}}
\subsubsection[{complete}]{\setlength{\rightskip}{0pt plus 5cm}bool Tokenizer\+::complete}\label{class_tokenizer_a330a4cce0cbf3ebfbe601d97022d1ed4}


Definition at line 16 of file tokenizer.\+hpp.

\hypertarget{class_tokenizer_a89707ad3a758fc9ec58f00d92d5fc622}{}\index{Tokenizer@{Tokenizer}!raw\+Tokens@{raw\+Tokens}}
\index{raw\+Tokens@{raw\+Tokens}!Tokenizer@{Tokenizer}}
\subsubsection[{raw\+Tokens}]{\setlength{\rightskip}{0pt plus 5cm}std\+::list$<$std\+::string$>$ Tokenizer\+::raw\+Tokens}\label{class_tokenizer_a89707ad3a758fc9ec58f00d92d5fc622}


Definition at line 14 of file tokenizer.\+hpp.

\hypertarget{class_tokenizer_ae547093dbd03b3e70373147e4669d9fa}{}\index{Tokenizer@{Tokenizer}!tokens@{tokens}}
\index{tokens@{tokens}!Tokenizer@{Tokenizer}}
\subsubsection[{tokens}]{\setlength{\rightskip}{0pt plus 5cm}std\+::list$<${\bf Token}$>$ Tokenizer\+::tokens}\label{class_tokenizer_ae547093dbd03b3e70373147e4669d9fa}


Definition at line 15 of file tokenizer.\+hpp.



The documentation for this class was generated from the following files\+:\begin{DoxyCompactItemize}
\item 
\hyperlink{tokenizer_8hpp}{tokenizer.\+hpp}\item 
\hyperlink{tokenizer_8cpp}{tokenizer.\+cpp}\end{DoxyCompactItemize}
