% Standard .aux-file compatibility shim: makes an .aux written by a
% hyperref-enabled run readable by a run WITHOUT hyperref, and vice versa.
\relax 
\ifx\hyper@anchor\@undefined
% hyperref is NOT loaded in this run, but the entries below may carry the
% extra hyperlink arguments hyperref writes. Redefine \contentsline and
% \newlabel to swallow those extras so plain LaTeX can read them.
\global \let \oldcontentsline\contentsline
% Under hyperref \contentsline gets a 4th argument (the hyperlink anchor,
% e.g. "chapter.1"); drop it and forward the standard three
% (entry type, entry text, page number).
\gdef \contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}}
\global \let \oldnewlabel\newlabel
% hyperref writes \newlabel{key}{{ref}{page}{title}{anchor}{extra}} — five
% sub-arguments instead of two. Re-brace so \newlabelxx can grab all six
% tokens (key + five fields) ...
\gdef \newlabel#1#2{\newlabelxx{#1}#2}
% ... and keep only {ref}{page}, the pair plain \newlabel expects.
\gdef \newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}}
% Restore the originals before the .aux is re-read at \end{document},
% so the end-of-run pass sees the untouched definitions.
\AtEndDocument{\let \contentsline\oldcontentsline
\let \newlabel\oldnewlabel}
\else
% hyperref IS loaded: defining \hyper@last tells it this .aux already
% contains up-to-date hyperref data (suppresses the "rerun" path).
\global \let \hyper@last\relax 
\fi

% --- babel: language selection for the body and the .toc/.lof/.lot files ---
\select@language{english}
\@writefile{toc}{\select@language{english}}
\@writefile{lof}{\select@language{english}}
\@writefile{lot}{\select@language{english}}
% --- Chapter 1 ---
\@writefile{toc}{\contentsline {chapter}{\numberline {1}Introduction}{4}{chapter.1}}
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
% --- Chapter 2: Theory ---
\@writefile{toc}{\contentsline {chapter}{\numberline {2}Theory}{5}{chapter.2}}
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
% FIX: "outlier" -> "Outlier" for consistent title case with sibling headings.
% Apply the same fix to the \section command in the .tex source, or the next
% LaTeX run will regenerate this file and revert the change.
\@writefile{toc}{\contentsline {section}{\numberline {2.1}Artificial Outlier Generation}{5}{section.2.1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.1}Hypersphere}{5}{subsection.2.1.1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.2}Hypercube}{6}{subsection.2.1.2}}
% NOTE(review): "PERHAPS::" is a leftover draft marker in the .tex heading —
% resolve it in the source; left unchanged here so the .aux mirrors the .tex.
\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.3}PERHAPS::Sample from marginal Distribution}{7}{subsection.2.1.3}}
% FIX: "Reiceiver Operator" -> "Receiver Operating" (standard ROC terminology;
% fix the \section command in the .tex source as well).
\@writefile{toc}{\contentsline {section}{\numberline {2.2}Receiver Operating Characteristic Graphs}{7}{section.2.2}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.1}Confusion Matrix and Classifier Performance}{7}{subsection.2.2.1}}
\@writefile{lot}{\contentsline {table}{\numberline {2.1}{\ignorespaces Confusion Matrix}}{8}{table.2.1}}
\newlabel{tab:confusion matrix}{{2.1}{8}{Confusion Matrix and Classifier Performance\relax }{table.2.1}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.2}ROC Space}{8}{subsection.2.2.2}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.3}ROC Curves and Area Under Curve}{8}{subsection.2.2.3}}
\@writefile{lof}{\contentsline {figure}{\numberline {2.1}{\ignorespaces A basic ROC graph showing some discrete classifiers with different performances.}}{9}{figure.2.1}}
\newlabel{fig:roc space}{{2.1}{9}{ROC Space\relax }{figure.2.1}{}}
\@writefile{lof}{\contentsline {figure}{\numberline {2.2}{\ignorespaces Different ROC Curves}}{9}{figure.2.2}}
\newlabel{fig:roc curves}{{2.2}{9}{ROC Curves and Area Under Curve\relax }{figure.2.2}{}}
% --- Section 2.3: Support Vector Machines ---
\@writefile{toc}{\contentsline {section}{\numberline {2.3}Support Vector Machines}{10}{section.2.3}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.3.1}Optimal Separating Hyperplane}{10}{subsection.2.3.1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.3.2}Support Vector Machines Algorithm}{10}{subsection.2.3.2}}
\newlabel{eqn:hyperplane}{{2.11}{10}{Support Vector Machines Algorithm\relax }{equation.2.11}{}}
\newlabel{eqn:decisionfct}{{2.13}{11}{Support Vector Machines Algorithm\relax }{equation.2.13}{}}
\newlabel{eqn:primal}{{2.17}{11}{Support Vector Machines Algorithm\relax }{equation.2.17}{}}
\newlabel{eqn:primalsubject}{{2.18}{11}{Support Vector Machines Algorithm\relax }{equation.2.18}{}}
\newlabel{eqn:lagrangian}{{2.19}{11}{Support Vector Machines Algorithm\relax }{equation.2.19}{}}
\newlabel{eqn:derv1}{{2.22}{12}{Support Vector Machines Algorithm\relax }{equation.2.22}{}}
\newlabel{eqn:derv2}{{2.23}{12}{Support Vector Machines Algorithm\relax }{equation.2.23}{}}
\newlabel{eqn:dual}{{2.24}{12}{Support Vector Machines Algorithm\relax }{equation.2.24}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.3.3}Nonlinear Support Vector Classifier}{12}{subsection.2.3.3}}
\@writefile{toc}{\contentsline {subsubsection}{Soft Margin Hyperplane}{13}{section*.2}}
\newlabel{eqn:cprimal1}{{2.28}{13}{Soft Margin Hyperplane\relax }{equation.2.28}{}}
\newlabel{eqn:cprimal2}{{2.29}{13}{Soft Margin Hyperplane\relax }{equation.2.29}{}}
% WARNING(review): "eqn:primal" is multiply defined — it already points to
% equation 2.17 above, and this record silently overwrites it, so every
% \ref{eqn:primal} resolves to eq. 2.30. Rename one of the two
% \label{eqn:primal} commands in the .tex source (LaTeX also warns
% "Label `eqn:primal' multiply defined"). Not renamed here: changing only
% the .aux would desynchronize it from the source.
\newlabel{eqn:primal}{{2.30}{13}{Soft Margin Hyperplane\relax }{equation.2.30}{}}
\newlabel{eqn:nuprimal}{{2.31}{13}{Soft Margin Hyperplane\relax }{equation.2.31}{}}
\newlabel{eqn:nuprimalsubject}{{2.33}{13}{Soft Margin Hyperplane\relax }{equation.2.33}{}}
\@writefile{toc}{\contentsline {subsubsection}{Kernel Trick}{13}{section*.3}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.3.4}Support Vector Machines for Outlier Detection: One Class Support Vector Machine}{14}{subsection.2.3.4}}
% --- Section 2.4: Kernel PCA ---
% FIX: "Compenent" -> "Component" (typo; fix the \section command in the
% .tex source as well).
\@writefile{toc}{\contentsline {section}{\numberline {2.4}Kernel Principal Component Analysis}{15}{section.2.4}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.4.1}Principal Component Analysis}{15}{subsection.2.4.1}}
\newlabel{eqn:covariance}{{2.40}{15}{Principal Component Analysis\relax }{equation.2.40}{}}
\newlabel{eqn:eigenvalue}{{2.41}{15}{Principal Component Analysis\relax }{equation.2.41}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.4.2}Kernel PCA as an Eigenvalue Problem}{15}{subsection.2.4.2}}
\newlabel{eqn:alpha1}{{2.45}{16}{Kernel PCA as an Eigenvalue Problem\relax }{equation.2.45}{}}
\newlabel{eqn:alpha2}{{2.46}{16}{Kernel PCA as an Eigenvalue Problem\relax }{equation.2.46}{}}
\newlabel{eqn:evprob}{{2.48}{16}{Kernel PCA as an Eigenvalue Problem\relax }{equation.2.48}{}}
\newlabel{eqn:kpca-A}{{2.57}{17}{Kernel PCA as an Eigenvalue Problem\relax }{equation.2.57}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.4.3}Projecting data}{17}{subsection.2.4.3}}
\newlabel{eqn:z}{{2.60}{17}{Projecting data\relax }{equation.2.60}{}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.4.4}Reconstruction error as novelty measure}{18}{subsection.2.4.4}}
\newlabel{eqn:recerror}{{2.61}{18}{Reconstruction error as novelty measure\relax }{equation.2.61}{}}
% --- Section 2.5: CART ---
\@writefile{toc}{\contentsline {section}{\numberline {2.5}Classification and Regression Tree}{19}{section.2.5}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.5.1}Tree Growing}{20}{subsection.2.5.1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.5.2}Splitting and Gini criterion}{20}{subsection.2.5.2}}
\newlabel{sec:gini}{{2.5.2}{20}{Splitting and Gini criterion\relax }{subsection.2.5.2}{}}
\newlabel{eqn:gini}{{2.68}{20}{Splitting and Gini criterion\relax }{equation.2.68}{}}
\newlabel{eqn:ginidecrease}{{2.70}{21}{Splitting and Gini criterion\relax }{equation.2.70}{}}
% --- Section 2.6: Random Forest ---
\@writefile{toc}{\contentsline {section}{\numberline {2.6}Random Forest}{21}{section.2.6}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.6.1}Construction of a Random Forest}{21}{subsection.2.6.1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.6.2}Error estimation}{22}{subsection.2.6.2}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.6.3}Variable Importance}{22}{subsection.2.6.3}}
\@writefile{toc}{\contentsline {subsubsection}{Mean Decrease Gini}{22}{section*.4}}
\@writefile{toc}{\contentsline {subsubsection}{Mean Decrease Accuracy}{23}{section*.5}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.6.4}Proximity Matrix}{23}{subsection.2.6.4}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.6.5}Outlier Detection}{23}{subsection.2.6.5}}
\@writefile{toc}{\contentsline {subsubsection}{Artificial Outlier Examples}{23}{section*.6}}
\@writefile{toc}{\contentsline {subsubsection}{Outlyingness}{24}{section*.7}}
% KOMA-Script (tocbasic/scrhack) bookkeeping written at end of run.
\gdef\scr@tpo@lst{0}
