\documentclass[10pt,a4paper]{article}
\usepackage[slantfont,boldfont]{xeCJK} % 允许斜体和粗体
\setCJKmainfont{SimSun}   % 设置缺省中文字体
\setCJKmonofont{SimSun}   % 设置等宽字体
\setmainfont{Times New Roman} % 英文衬线字体
\setmonofont{Times New Roman} % 英文等宽字体
\setsansfont{Times New Roman} % 英文无衬线字体
\usepackage{amsmath}          % 数学公式标准宏包
\begin{document} 
\section{Probability}
\subsection{Probability in life}
\[
\sum_{i=0}^{n} X_i = X = \frac{\partial x}{\partial z}
\]





\subsection{Mean and S.D}
\[
\mu = \frac{\sum X_i}{n}, \qquad \textrm{for}\; i = 0 \dots n
\]
\[
 \mathit{SD} = \sqrt{\frac{\sum (x_i - \mu)^2}{n}}
\]



\subsection{Linear Regression}
\[
y_i = \beta_0 + \beta_1 x_i + u_i
\]
\[
\Rightarrow \qquad u_i = y_i - \beta_0 - \beta_1 x_i
\]
\[
\Rightarrow \qquad (u_i)^2 = {(y_i - \beta_0 - \beta_1 x_i)}^2
\]
\[
\Rightarrow \qquad \min (u_i)^2 = \min {(y_i - \beta_0 - \beta_1 x_i)}^2
\]
\emph{这里我们将使用微积分来计算最小化$(u_i)^2$的$\beta_0$和$\beta_1$的值}




\subsection{Regression Assumptions}
\[
y_i = \beta_1 + \beta_2 x_i + e_i\]
\[E(e)=0\]
\[\mathrm{var}(y)=\sigma^2\]
\[\mathrm{cov}(y_i,y_j)=0\]
\[y \sim N\left(\beta_1 + \beta_2 x,\; \sigma^2\right)\]
\emph{这里我们将使用微积分来计算最小化$(u_i)^2$的$\beta_0$和$\beta_1$的值}




\subsection{OLS}
\[\hat y_t = b_1 + b_2 x_t\]
\[\hat e_t = y_t - \hat y_t = y_t - b_1 - b_2 x_t\]
\[\sum \hat e_t^2 = \sum {(y_t - \hat y_t)^2}\]
如何使残差平方和最小化？使用OLS（最小二乘法）：
\[S(\beta_1,\beta_2)=\sum_{t=1}^{T} (y_t - \beta_1 - \beta_2 x_t)^2\]




\subsection{using the derivative for FOC =0 }
\[
\frac{\partial S}{\partial \beta_1} = 2T\beta_1 - 2\sum y_t + 2 \sum x_t \beta_2 =0 
\]
\[
\frac{\partial S}{\partial \beta_2} =  2\sum x_t^2 \beta_2 - 2 \sum x_t y_t + 2\sum x_t \beta_1 =0
\]
\emph{now we use $b_1$ and $b_2$ to replace $\beta_1$ and $\beta_2$}
\[
2(\sum y_t - Tb_1 - \sum x_t b_2) = 0
\]
\[
2(\sum x_t y_t - \sum x_t b_1 - \sum x_t^2 b_2) = 0
\]



\subsection{Here we solve the question with matrix}
\[
\left[
\begin{array}{cc}
T            & \sum x_t   \\
\sum x_t     & \sum x_t^2 \\
\end{array}
\right]
\left[
\begin{array}{c}
\beta_1 \\
\beta_2
\end{array}
\right]
=
\left[
\begin{array}{c}
\sum y_t \\
\sum x_t y_t
\end{array}
\right]
\]
\emph{使用Cramer法则，就可以计算$\beta_1$和$\beta_2$}
\[
\beta_2 = \frac {T \sum x_t y_t - \sum x_t \sum y_t} {T \sum x_t^2 - (\sum x_t)^2}
\]
\[
\Rightarrow \beta_2 = \sum w_t y_t \qquad \textrm{where} \qquad w_t = \frac{x_t - \bar x}{\sum (x_t - \bar x)^2}, \qquad \sum w_t = 0 \qquad \textrm{and} \quad \sum w_t x_t = 1
\]



\[
\sum (x_t - \bar x)^2 = \sum x_t^2 - \frac{(\sum x_t)^2}{T}, \qquad \textrm{for} \qquad \sum x_t = T \bar x
\]

\end{document}
