\select@language {english}
\contentsline {chapter}{\numberline {1}Introduction}{4}{chapter.1}
\contentsline {chapter}{\numberline {2}Theory}{5}{chapter.2}
\contentsline {section}{\numberline {2.1}Artificial Outlier Generation}{5}{section.2.1}
\contentsline {subsection}{\numberline {2.1.1}Hypersphere}{5}{subsection.2.1.1}
\contentsline {subsection}{\numberline {2.1.2}Hypercube}{6}{subsection.2.1.2}
\contentsline {subsection}{\numberline {2.1.3}Sample from Marginal Distribution}{7}{subsection.2.1.3}
\contentsline {section}{\numberline {2.2}Receiver Operating Characteristic Graphs}{7}{section.2.2}
\contentsline {subsection}{\numberline {2.2.1}Confusion Matrix and Classifier Performance}{7}{subsection.2.2.1}
\contentsline {subsection}{\numberline {2.2.2}ROC Space}{8}{subsection.2.2.2}
\contentsline {subsection}{\numberline {2.2.3}ROC Curves and Area Under Curve}{8}{subsection.2.2.3}
\contentsline {section}{\numberline {2.3}Support Vector Machines}{10}{section.2.3}
\contentsline {subsection}{\numberline {2.3.1}Optimal Separating Hyperplane}{10}{subsection.2.3.1}
\contentsline {subsection}{\numberline {2.3.2}Support Vector Machines Algorithm}{10}{subsection.2.3.2}
\contentsline {subsection}{\numberline {2.3.3}Nonlinear Support Vector Classifier}{12}{subsection.2.3.3}
\contentsline {subsubsection}{Soft Margin Hyperplane}{13}{section*.2}
\contentsline {subsubsection}{Kernel Trick}{13}{section*.3}
\contentsline {subsection}{\numberline {2.3.4}Support Vector Machines for Outlier Detection: One Class Support Vector Machine}{14}{subsection.2.3.4}
\contentsline {section}{\numberline {2.4}Kernel Principal Component Analysis}{15}{section.2.4}
\contentsline {subsection}{\numberline {2.4.1}Principal Component Analysis}{15}{subsection.2.4.1}
\contentsline {subsection}{\numberline {2.4.2}Kernel PCA as an Eigenvalue Problem}{15}{subsection.2.4.2}
\contentsline {subsection}{\numberline {2.4.3}Projecting data}{17}{subsection.2.4.3}
\contentsline {subsection}{\numberline {2.4.4}Reconstruction error as novelty measure}{18}{subsection.2.4.4}
\contentsline {section}{\numberline {2.5}Classification and Regression Tree}{19}{section.2.5}
\contentsline {subsection}{\numberline {2.5.1}Tree Growing}{20}{subsection.2.5.1}
\contentsline {subsection}{\numberline {2.5.2}Splitting and Gini criterion}{20}{subsection.2.5.2}
\contentsline {section}{\numberline {2.6}Random Forest}{21}{section.2.6}
\contentsline {subsection}{\numberline {2.6.1}Construction of a Random Forest}{21}{subsection.2.6.1}
\contentsline {subsection}{\numberline {2.6.2}Error estimation}{22}{subsection.2.6.2}
\contentsline {subsection}{\numberline {2.6.3}Variable Importance}{22}{subsection.2.6.3}
\contentsline {subsubsection}{Mean Decrease Gini}{22}{section*.4}
\contentsline {subsubsection}{Mean Decrease Accuracy}{23}{section*.5}
\contentsline {subsection}{\numberline {2.6.4}Proximity Matrix}{23}{subsection.2.6.4}
\contentsline {subsection}{\numberline {2.6.5}Outlier Detection}{23}{subsection.2.6.5}
\contentsline {subsubsection}{Artificial Outlier Examples}{23}{section*.6}
\contentsline {subsubsection}{Outlyingness}{24}{section*.7}
