\documentclass[UTF8]{ctexart}
\usepackage{amsmath}
\usepackage{amsthm,amssymb}
\usepackage{cite}
\usepackage{mathrsfs}
\usepackage{graphicx}
\usepackage{subfigure}
\usepackage{epstopdf}
\usepackage{indentfirst}
\usepackage{caption}
\usepackage{lmodern}
\usepackage{float}
\usepackage{bbm}
\usepackage{dsfont}
% NOTE: bbold/mathbbol both redefine blackboard-bold symbols; only \mathds (dsfont)
% is used below -- consider removing them if font clashes appear.
\usepackage{bbold}
\usepackage{mathbbol}
\usepackage{newtxmath}
\usepackage{geometry}
\setlength{\parskip}{0.5em}
\geometry{a4paper,scale=0.8}
\pagenumbering{arabic}


% info
\title{机器学习第五次作业}
\author{刘本宸 22920202200764}
\date{\today}
\begin{document}
\maketitle
\tableofcontents
\makeatletter
\def\@cite#1#2{\textsuperscript{[{#1\if@tempswa , #2\fi}]}}
\makeatother
\makeatletter %使\section中的内容左对齐
\renewcommand{\section}{\@startsection{section}{1}{0mm}
    {-\baselineskip}{0.5\baselineskip}{\normalfont\Large\bfseries\raggedright}}
\makeatother
\newpage
\section{EM Algorithm - Case1}
\paragraph{
    给定数据集有：$\mathbf{D} = \{\langle x_i, y_i\rangle\}_{i=1}^n,\ x_i \in \mathbb R^d,\ y_i \in \{0,1\}$。
    当$x$为连续随机变量的时候，我们假定随机变量$x$是符合Gauss分布的。我们有$x|y=0 \sim \mathcal{N}(x \mid \mu_0,\Sigma_0),\ x|y=1 \sim \mathcal{N}(x \mid \mu_1,\Sigma_1)$，
    我们使用0-1分布来建模$y$，即$P(y=1) = p,\ P(y=0) = 1-p$。
}
\paragraph{我们可以使用对数极大似然函数来计算参数，并且使用条件概率公式将其变形：}
\begin{equation}
    \ln P(D) = \ln\Bigl(\prod_{i=1}^n P(x_i,y_i;\theta)\Bigr) = \sum_{i=1}^n \ln P(x_i,y_i;\theta) = \sum_{i=1}^n \ln P(y_i;\theta) + \sum_{i=1}^n \ln P(x_i \mid y_i;\theta)
\end{equation}
\paragraph{代入$x$和$y$对应的概率模型可以把上述等式变成：}
\begin{equation}
    \begin{split}
        & \ln P(D) =\sum_{i=1}^n \left[ y_i \ln p + (1-y_i)\ln(1-p)\right] \\
        &  + \sum_{i=1}^n \mathds{1}\left\{ y_i = 0 \right\}\ln\left[(2 \pi)^{-\frac{d}{2}} \det(\Sigma_0)^{-\frac{1}{2}} \exp\Bigl\{-\frac{1}{2}(x_i - \mu_0)^T \Sigma_0^{-1}(x_i - \mu_0) \Bigr\} \right] \\
        &  + \sum_{i=1}^n \mathds{1}\left\{ y_i = 1 \right\}\ln\left[(2 \pi)^{-\frac{d}{2}} \det(\Sigma_1)^{-\frac{1}{2}} \exp\Bigl\{-\frac{1}{2}(x_i - \mu_1)^T \Sigma_1^{-1}(x_i - \mu_1) \Bigr\} \right]
    \end{split}
\end{equation}
\paragraph{记$n_1 = \sum_{i=1}^n y_i,\ n_0 = \sum_{i=1}^n (1-y_i)$，通过作业中给定的提示对$p$求导数，可以得到：}
\begin{equation}
    p = \frac{n_1}{n_1+n_0}
\end{equation}
\paragraph{已知二次型的求导公式为\cite{cite1}（分母布局）：$\frac{\partial(\vec{x}^T A\vec{x})}{\partial \vec{x}} = (A+A^T)\vec{x}$，在这里因为协方差矩阵的对称性我们有：$A = A^T$，即导数为$2A\vec{x}$，然后对$\mu_0$求梯度可以得到：}
\begin{equation}
    \begin{split}
        \nabla_{\mu_0}\mathbf{L}_D(\theta) &= \frac{\partial}{\partial \mu_0}\sum_{i=1}^n \mathds{1}\{ y_i=0 \}\left[-\frac{1}{2}(x_i-\mu_0)^T \Sigma_0^{-1} (x_i-\mu_0)\right] \\
        &= \sum_{i=1}^n \mathds{1}\{ y_i=0 \}\,\frac{1}{2}\,\frac{\partial \left[(x_i-\mu_0)^T \Sigma_0^{-1} (x_i-\mu_0)\right]}{\partial (x_i - \mu_0)} \\
        &= \sum_{i=1}^n \mathds{1}\{ y_i=0 \}\,\Sigma_0^{-1}(x_i - \mu_0)
    \end{split}
\end{equation}
\paragraph{令上述梯度等于0我们可以得到:}
\begin{equation}
    \begin{split}
        & \sum_{i=1}^n \mathds{1}\{ y_i=0 \}x_i = n_0\mu_0 \\
        & \mu_0 = \frac{\sum_{i=1}^n \mathds{1}\{ y_i=0 \}x_i}{n_0}
    \end{split}
\end{equation}
\paragraph{同样的方法也适用于$\mu_1$的求解，所以：}
\begin{equation}
    \mu_1 = \frac{\sum_{i=1}^n \mathds{1}\{ y_i=1 \}x_i}{n_1}
\end{equation}
\paragraph{已知$\Sigma_0 = \Sigma_0^T \quad \Sigma_0^{-1}= (\Sigma_0^{-1})^T$，对$\Sigma_0$求梯度\cite{cite2}我们可以得到：}
\begin{equation}
    \begin{split}
        & \nabla_{\Sigma_0}\mathbf{L}_D(\theta) = -\frac{1}{2} \sum_{i=1}^n \mathds{1}\{ y_i=0 \}
        \left[ \frac{\partial (\ln \det(\Sigma_0))}{ \partial \Sigma_0} +\frac{\partial ((x_i-\mu_0)^T \Sigma_0^{-1} (x_i-\mu_0))}{\partial \Sigma_0} \right] \\
        & = -\frac{1}{2} \sum_{i=1}^n \mathds{1}\{ y_i=0 \} \left[ \Sigma_0^{-1} - \Sigma_0^{-1}(x_i-\mu_0)(x_i-\mu_0)^T \Sigma_0^{-1}\right]
    \end{split}
\end{equation}
\paragraph{令上述梯度等于0我们可以得到$\Sigma_0$，同样可以一并求出 $\Sigma_1$：}
\begin{equation}
    \begin{split}
        & \Sigma_0 = \frac{1}{n_0}\sum_{i=1}^n \mathds{1}\{ y_i=0 \}(x_i-\mu_0)(x_i-\mu_0)^T \\
        & \Sigma_1 = \frac{1}{n_1}\sum_{i=1}^n \mathds{1}\{ y_i=1 \}(x_i-\mu_1)(x_i-\mu_1)^T
    \end{split}
\end{equation}
\section{EM Algorithm - Case2}

\paragraph{根据已知我们可以书写对数似然函数：}
\begin{equation}
    \begin{split}
        &\ln P(D) = \sum_{i=1}^n \ln\bigl(p^{y_i}(1-p)^{1-y_i}\bigr) +\\
        & \sum_{i=1}^n\sum_{j = 1}^d\mathds{1} \{ y_i=0 \}\ln \bigl(\phi_{0j}^{x_{ij}} (1-\phi_{0j})^{1- x_{ij}}\bigr) + \\
        & \sum_{i=1}^n\sum_{j = 1}^d \mathds{1} \{ y_i=1 \}\ln \bigl(\phi_{1j}^{x_{ij}} (1-\phi_{1j})^{1- x_{ij}}\bigr)
    \end{split}
\end{equation}
\paragraph{求解p我们可以使用与Case1相同的方法，因此在这里就不再赘述：}
\begin{equation}
    p = \frac{n_1}{n_1+n_0}
\end{equation}
\paragraph{已知$\phi_0, \phi_1 \in \mathbb{R}^d$, 我们对于$\phi_0$的第j个元素求偏导有：}
\begin{equation}
    \frac{\partial lnP(D)}{\partial \phi_{0j}} = \sum_{i=1}^n \mathds{1} \{ y_i=0 \} \left[ \frac{x_{ij}}{\phi_{0j}} -  \frac{1- x_{ij}}{1- \phi_{0j}} \right]
\end{equation}
\paragraph{令上述导数等于0即可得出$\phi_0$，而且我们可以用同样的方法求出$\phi_1$：}
\begin{equation}
    \begin{split}
        \phi_{0j} = \frac{\sum_{i=1}^n \mathds{1} \{ y_i=0 \} x_{ij}}{n_0} \quad j \in \left\{ 1,2,\dots,d \right\}
        \\
        \phi_{1j} = \frac{\sum_{i=1}^n \mathds{1} \{ y_i=1 \} x_{ij}}{n_1} \quad j \in \left\{ 1,2,\dots,d \right\}
        \\
        \phi_0 = (\phi_{01},\phi_{02},\dots,\phi_{0d})^T \quad \phi_1 = (\phi_{11},\phi_{12},\dots,\phi_{1d})^T
    \end{split}
\end{equation}
\paragraph{本次作业到此结束。}
\newpage
\begin{thebibliography}{1}
    \bibitem{cite1}
    二次型求导的推导过程 \quad https://www.zhihu.com/question/22455493
    \bibitem{cite2}
    对二次型系数矩阵的行列式求导 \quad homework5.pdf
\end{thebibliography}
\end{document}