\newproblem{lay:6_8_1}{
  % Problem identification
	\begin{large}
	  \hspace{\fill}\newline
    \textbf{Lay, 6.8.1}
	\end{large}
	\\
  \ifthenelse{\boolean{identifyAuthor}}{\textit{Carlos Oscar Sorzano, Aug. 31st, 2013} \\}{}

  % Problem statement
	Find the least-squares line $y=\beta_0+\beta_1x$ that best fits the data points $(-2,0)$, $(-1,0)$, $(0,2)$, $(1,4)$, and $(2,4)$, assuming that
	the first and last data points are less reliable. Weight them half as much as the three interior points.
}{
   % Solution
	Weighted Least Squares (WLS) solves the normal equations associated with the weighted system
	\begin{center}
		$WA\boldsymbol{\beta}=W\mathbf{y}$
	\end{center}
	where $W$ is the weight matrix, $A$ the design matrix, $\boldsymbol{\beta}$ the unknown vector, and $\mathbf{y}$ the observed vector. In this case,
	\begin{center}
		$\begin{pmatrix}\frac{1}{2} & 0  & 0 & 0 & 0 \\ 0 & 1 & 0 & 0 & 0 \\ 0 & 0 & 1 & 0 & 0 \\ 0 & 0 & 0 & 1 & 0 \\ 0 & 0 & 0 & 0 & \frac{1}{2}\end{pmatrix}
		  \begin{pmatrix} 1 & -2 \\ 1 & -1 \\ 1 & 0 \\ 1 & 1 \\ 1 & 2\end{pmatrix}\begin{pmatrix}\beta_0\\ \beta_1 \end{pmatrix}=
			\begin{pmatrix}\frac{1}{2} & 0  & 0 & 0 & 0 \\ 0 & 1 & 0 & 0 & 0 \\ 0 & 0 & 1 & 0 & 0 \\ 0 & 0 & 0 & 1 & 0 \\ 0 & 0 & 0 & 0 & \frac{1}{2}\end{pmatrix}
			\begin{pmatrix} 0\\ 0 \\ 2 \\ 4 \\ 4 \end{pmatrix}$
	\end{center}
	The normal equations of this problem are
	\begin{center}
		$(WA)^T WA\boldsymbol{\beta}=(WA)^TW\mathbf{y}$
	\end{center}
	and their solution is
	\begin{center}
		$\hat{\boldsymbol{\beta}}=((WA)^T WA)^{-1}(WA)^TW\mathbf{y}$
	\end{center}
	In this particular case,
	\begin{center}
		$\hat{\boldsymbol{\beta}}=\begin{pmatrix} 2 \\ \frac{3}{2} \end{pmatrix}$
	\end{center}
	That is, the WLS line is $y=2+\frac{3}{2}x$, which is represented below along with the original data:
	\begin{center}
		\includegraphics[scale=0.5]{Tema7/lay_6_8_1.eps}
	\end{center}
}
\useproblem{lay:6_8_1}
\ifthenelse{\boolean{eachProblemInOnePage}}{\newpage}{}

