\documentclass[11pt,letterpaper]{article}

\newcommand{\mytitle}{CS369N Homework 1}
\newcommand{\myauthor}{Kevin Lewi \\ \small \textit{Collaborated with: Manas 
Joglekar}}
\date{October 20, 2011}

\usepackage{hwformat}

\begin{document}

\maketitle

\newcommand{\cost}{\mathsf{cost}}

\section*{Problem 1}

\subsection*{Part A}

\begin{lem}
	There is no algorithm $A$ such that for all algorithms $B$ and all inputs $Z$,
	\[ \cost(A,Z) \leq \cost(B,Z). \]
\end{lem}

\begin{proof}
	Assume to get a contradiction that there exists an instance-optimal algorithm 
	$A$. Let $j$ be the index in the array that $A$ first queries. Now, consider 
	the following algorithm $B$ and instance $Z$: algorithm $B$ first queries 
	array index $i \neq j$. If it has found the element, then it is done; 
	otherwise, it runs a normal binary search. Let the instance $Z$ be such that 
	the element to be found in the array happens to be at index $i$. Then, note 
	that $\cost(A,Z) > 1$, since the element does not lie at index $j$. However, 
	$\cost(B,Z) = 1$ since the element lies at index $i$. Thus, $A$ cannot be 
	instance-optimal.
\end{proof}

\subsection*{Part B}

\begin{lem}
	Any algorithm $A$ must make at least $n-1$ comparisons and the union of the 
	elements involved in the comparisons must be of size $n$.
\end{lem}

\begin{proof}
	If an algorithm $A$ makes at most $n-2$ comparisons, then let $G$ be a graph 
	of $n$ vertices where each vertex is associated with an element in the array, 
	and there is a directed edge from $u$ to $v$ if the element corresponding to 
	$u$ is compared to the element corresponding to $v$, and $v$ is larger. There 
	are at most $n-2$ edges in this graph of $n$ vertices, so there are at least 
	two components in $G$, call them $C_1$ and $C_2$. If $A$ places the elements 
	of $C_1$ before $C_2$ in the array, then there exist examples where the sorted 
array consists of the elements of $C_2$ first. By symmetry, we can also reach a 
	contradiction if $A$ places the elements of $C_2$ before $C_1$. Thus, no such 
	$A$ can exist.
\end{proof}

Assume for a contradiction that there exists an instance-optimal algorithm $A$. 
Let $\sigma$ be the sequence of pairs of elements that are compared by $A$.
By the previous lemma, $|\sigma| \geq n-1$. For an instance $X$, let $t(X, 
\sigma_i)$ be the output result of the comparison oracle for the $i$th 
comparison query. Also, let $Z$ be an instance where $\sigma_1$ outputs 
``larger'', and $\sigma_j$ for all $j \in [2,n-1]$ outputs ``smaller''. Then, 
$\cost(A,Z) > n-1$, since $A$ does not know if the first element it compared is 
the maximum element of the array.

Let $\pi$ be the sequence of indices of $\sigma$ such that $\sigma_{\pi_1}, 
\cdots, \sigma_{\pi_n}$ is in sorted order. Then, let algorithm $B$ be as 
follows: first, make $n-1$ comparisons between $(\sigma_{\pi_j}, 
$\sigma_{\pi_{j+1}})$ for all $j \in [1,n-1]$. If all outputs are ``smaller'', then 
output the elements in sorted order $\sigma_{\pi_1}, \cdots, \sigma_{\pi_n}$. 
Else, run bubble sort.

Note that $B$ is a valid algorithm for all instances since in the worst case it 
runs bubble sort, which will correctly sort any instance. However, $\cost(B,Z) = 
n-1 < \cost(A,Z)$, which means that $A$ cannot be instance-optimal.

\subsection*{Part C}

Let $P$ be a computational problem along with an associated $\cost$ function on 
algorithms and their inputs. Let $\mathcal{A}_P$ be the set of all algorithms 
that solve $P$ and $\mathcal{I}$ be the set of all instances. Let $\alpha(A) = 
\max_{Z \in \mathcal{I}} \cost(A,Z)$, so that $\alpha(A)$ corresponds to the 
worst-case analysis of the cost of $A$. Also, let $Z^*$ be the instance for 
which $\cost(A,Z^*) = \alpha(A)$ (hence, the worst instance for $A$).

\newcommand{\decide}{\text{decide}}

Define a new computational problem $\decide(Y,Z)$ for two instances $Y$ and $Z$ 
to be the problem of determining whether $Y = Z$. Now, let $\beta(A) = \max_{Y,Z 
\in \mathcal{I}} \cost(A,\decide(Y,Z))$ for an algorithm that solves 
$\decide(Y,Z)$. In words, $\beta$ represents taking the worst-case amount of 
time it takes for an algorithm $A$ to determine if $Y = Z$ over all inputs $Y$ 
and $Z$.

\begin{lem}
	The case where $\beta(B) < \alpha(A)$ for algorithms $B$ that solve 
	$\decide(Y,Z)$, and algorithms $A$ that solve $P$ is sufficient to conclude 
	that there is no instance-optimal algorithm for $P$.
\end{lem}

\begin{proof}
	We can construct a very simple algorithm $C$ that solves $P$ as follows: upon 
	receiving an input $Z$, run algorithm $B$ to see if $Z = Z^*$ (recall that 
	$Z^*$ is the instance on which $A$ performs worst). If they are 
	equal, then output the correct solution for the instance --- otherwise, run 
	$A$.

	Note that $\cost(C,Z^*) = \beta(B)$ by definition. Also, $\cost(A,Z^*) = 
	\alpha(A)$. Since $\beta(B) < \alpha(A)$, we get that $\cost(C,Z^*) < 
	\cost(A,Z^*)$, which implies that $A$ is not instance-optimal.
\end{proof}

\section*{Problem 2}

\subsection*{Part A}

It is enough to show that for every permutation $\pi_1$ of $S$, there exists a 
permutation $\pi_2$ of $S$ such that $\cost(FF,\pi_1(S)) \leq \cost(B, 
\pi_2(S))$ for any greedy online algorithm $B$. Recall that upon depositing a 
new piece, FF considers the bins from left to right and places the new piece in 
the leftmost bin that can hold the piece. Fix $\pi_1$, and let the bins that FF 
uses be labeled $B_1, \ldots, B_m$ (ordered from left to right). Hence, 
$\cost(FF,\pi_1(S)) = m$.

We construct a permutation $\pi_2$ of $S$ as follows: all pieces placed by FF in 
$B_i$ are requested before the pieces placed by FF in $B_{i+1}$, for all $i \in 
[1,m-1]$. 

\begin{lem}
	\[ \cost(B,\pi_2(S)) \geq \cost(FF,\pi_1(S)). \]
\end{lem}

\begin{proof}
	We just need to show that $B$ must use at least $m$ bins when dealing with 
	sequence $\pi_2(S)$. Let $C_1, \cdots, C_q$ be the bins that algorithm $B$ 
	uses, ordered with respect to the order that algorithm $B$ starts each bin.
	
	We will say that a piece $p \in B_i(FF)$ if FF placed $p$ in $B_i$ under 
	$\pi_1(S)$, and $p \in C_i(B)$ if $B$ placed $p$ in $C_i$ under $\pi_2(S)$. 
	Now, we show that if $p \in B_i(FF)$, then $p \in C_i(B)$. We induct on $i$, 
	so we first consider $i=1$. Note that $p \in B_1(FF)$ implies that $p \in 
	C_1(B)$ since all pieces of $B_1(FF)$ were able to fit in the same bin. Assume 
	inductively that the claim is true for all $j < i$; we would like to show that 
	$p \in B_i(FF)$ still implies that $p \in C_i(B)$.

	A piece $p \in B_i(FF)$ was placed in $B_i$ because it did not fit in $B_j$ 
	for $j \in [1,i-1]$. Thus, since all pieces that have been placed in such 
	$B_j$ have already been placed under $\pi_2(S)$, $p$ will not be able to fit 
	in $C_j$ for $j \in [1,i-1]$, either. However, $p$ will be able to fit in 
	$C_i$ since $p$ was able to fit in $B_i$. Thus, $p \in C_i$ as desired, which 
	completes the induction. We can therefore conclude that the number of bins 
	used by $B$ under $\pi_2(S)$ is at least the number of bins used by FF under 
	$\pi_1(S)$, which establishes the claim.
\end{proof}

By the above lemma, we get that $\max_{\pi}(\cost(FF,\pi(S))) \leq 
\max_{\pi}(\cost(B,\pi(S)))$.

\subsection*{Part B}

For any $n$, define the canonical multiset $S^* = \{(1/2, 2n),(1/(2n), 2n)\}$ 
and the canonical sequence \[ \pi^* = \pi(S^*) = 1/2,1/(2n),1/2,1/(2n), \cdots, 
1/2, 1/(2n). \] In other words, $\pi^*$ is alternating between piece sizes $1/2$ 
and $1/(2n)$ for a total of $2n$ times.

Next, consider the greedy online algorithm FFI (first-fit increasing): sort the 
used bins in order of increasing fullness (so, the first bin considered will 
have the most space available), and place the new piece into the first bin it 
fits into.

We also define FFD (first-fit decreasing) to be similar to FFI, but to sort the 
bins in order of decreasing fullness (so that the most full bin is ordered 
first), placing the new piece into the first bin it fits into.

\begin{lem}
	$\cost(FFI,\pi^*) = 2n$.
\end{lem}

\begin{proof}
	$FFI$ will place $1/2$ and $1/(2n)$ in the same bin, then start a new bin for 
	$1/2$, then place $1/(2n)$ in the second bin, then start a new bin for $1/2$, 
	etc. Thus, each bin will contain exactly one piece of size $1/2$ and one piece 
	of size $1/(2n)$, which means that $FFI$ uses $2n$ bins.
\end{proof}

\begin{lem}
	$\max_{\pi}(\cost(FFD, \pi(S^*))) \leq n+2$.
\end{lem}

% Note: \span is a TeX primitive used by alignment (\halign, amsmath), so we
% define a fresh command \Span rather than redefining the primitive.
\newcommand{\Span}{\mathsf{span}}

\begin{proof}
	Suppose that FFD has already matched all of the pieces to bins. Define the 
	span of FFD, $\Span(FFD)$, to be the set of bins which contain pieces of size 
	$1/(2n)$. We want to claim that $\#\Span(FFD) \leq 2$.
	
	To see this, let $b \in \Span(FFD)$ be the bin that a piece of size $1/(2n)$ 
	was first placed into. Since there exists at least one piece of size $1/(2n)$ 
	in $b$, there cannot exist two pieces of size $1/2$ in $b$. Thus, there is at 
	most one piece of size $1/2$, which means there are either $2n$ pieces or $n$ 
	pieces of size $1/(2n)$ in $b$. If there are $2n$ pieces, then $\#\Span(FFD) = 
	1$ and we are done. For the latter case, let $b'$ be the bin that a piece of 
	size $1/(2n)$ (that was not among the first $n$ pieces of size $1/(2n)$) was 
	first placed into. By the same argument, there must be at least $n$ pieces of 
	size $1/(2n)$ in this bin $b'$. Thus, all $2n$ pieces of size $1/(2n)$ have 
	been accounted for, yielding $\#\Span(FFD) \leq 2$.

	Now, note that all $2n$ pieces of size $1/(2n)$ are packed into at most $\# 
	\Span(FFD)$ bins, and the $2n$ pieces of size $1/2$ are packed into $n$ bins, 
	so that the cost of $FFD$ on this permutation is $n+\# \Span(FFD) \leq n+2$.
\end{proof}

Thus, we see that there exists a permutation $\pi^*$ such that $\cost(FFI,\pi^*) 
> \max_{\pi}(\cost(FFD,\pi(S^*)))$, which proves that FFI is not order-oblivious 
instance-optimal among greedy online algorithms.

\subsection*{Part C}

\begin{lem}
	The minimum such $\alpha \geq 1$ such that for every two greedy algorithms $A$ 
	and $B$ and every multiset $S$ of items, $\max_{\pi}(\cost(A,\pi(S))) \leq 
	\alpha \cdot \max_{\pi}(\cost(B,\pi(S)))$ is $\alpha = 2$.
\end{lem}

\begin{proof}
	For a multiset of pieces $S$, let $c(S)$ represent the sum of the piece sizes 
	of the elements of $S$. We will show that for any greedy online algorithm $A$, 
	$c(S) \leq \cost(A,\pi(S)) \leq 2 \cdot c(S) +1$ for any ordering $\pi$. The 
	first inequality is obvious, since if $\cost(A,\pi(S)) < c(S)$, then summing 
	the capacities of each bin in the packing used by $A$ would be less than the 
	sum of the piece sizes of $S$, which is clearly impossible.
	
	For the second inequality, note that in a packing for $A$, at most one bin can 
	be filled to less than half capacity. If this were not the case, then let 
	$b_1$ and $b_2$ be two bins that are filled to less than half capacity, and 
	without loss of generality, assume $b_1$ was started before $b_2$. Let $p$ be 
	the first piece placed in $b_2$. Note that the size of $p$ is at most $1/2$, 
	and $b_1$ can fit a piece of size at most $1/2$, which means that $A$ would 
	have placed $p$ in $b_1$ instead of $b_2$, a contradiction. Hence, let $b^*$ 
	be the (at most one) bin that is filled to less than half capacity. Let the 
	set $B$ represent the bins used by $A$'s packing, and note that the total 
	piece size accounted for by the bins in $B \setminus \{b^*\}$ is at least 
	$(|B|-1)/2$, which means that $c(S) \geq (|B|-1)/2$. Therefore, $\cost(A) = 
	|B| \leq 2 \cdot c(S) + 1$.

	Thus, since under any ordering of $S$, any two online greedy algorithms have 
	costs that lie within the interval $[c(S), 2 \cdot c(S) + 1]$, we can say that 
	$\max_{\pi}(\cost(A,\pi(S))) \leq 2 \max_{\pi}(\cost(B,\pi(S)))$.

	To see that $2$ is the minimum $\alpha$ for which this is true, consider the 
	example in Part B of $S^*$, where $\cost(FFI,\pi^*) = 2n$ and 
	$\max_{\pi}(\cost(FFD, \pi(S^*))) \leq n+2$. Thus, as $n$ tends toward 
	$\infty$, \[ \lim_{n \to \infty} \frac{\max_{\pi}(\cost(FFI, 
	\pi(S^*)))}{\max_{\pi}(\cost(FFD, \pi(S^*)))} \geq \lim_{n \to \infty} 
	\frac{2n}{n+2} = 2, \] which establishes the lower bound of $\alpha \geq 2$.
\end{proof}

\section*{Problem 3}

\subsection*{Part A}

This fact alone does not have any implications for average-case analysis. One 
might first suspect that on average, $A$ costs at most $B$, but this is not 
necessarily the case. The reason for the discrepancy is that the probability 
that $z$ occurs could differ from the probability that $M(z)$ occurs. For a 
small but concrete example, consider the following case:

Let $I = \{x,y\}$ be the set of inputs (so, there are only two possible inputs). 
Suppose we have a distribution $D$ on the inputs such that $x$ occurs with 
probability $9/10$ and $y$ occurs with probability $1/10$. Also, suppose that 
$\cost(A,x) = 3$ and $\cost(A,y) = 1$. Also, $\cost(B,x) = 2$ and $\cost(B,y) = 
4$. Then, one could construct the perfect matching $M$ such that $M(x) = y$ and 
$M(y) = x$. Note that $\cost(A,x) = 3 < \cost(B,M(x)) = \cost(B,y) = 4$, and 
$\cost(A,y) = 1 < \cost(B,M(y)) = \cost(B,x) = 2$. Thus, we can conclude that $A 
\leq B$, yet $E[\cost(A)] = 14/5$ and $E[\cost(B)] = 11/5$. Thus, in this 
example, $A$ has higher cost than $B$ on average.

\subsection*{Part B}

Consider $N=5,k=2,n=5$ ($5$ pages, size $2$ cache, sequences of $5$ requests). 
We will count the number of sequences in which FWF and LRU both fault on all $5$ 
page requests, and we will show that there are more such sequences for FWF than 
LRU.

\begin{lem}
	There are $720$ sequences for which FWF faults on all $5$ requests.
\end{lem}

\begin{proof}
	We have $N$ choices for the first page and $N-1$ choices for the second page, 
	since the cache starts out empty. Then, we have $N-2$ choices for the third 
	page (since it cannot be either the first or second page). The cache is then 
	cleared by FWF, and we have $N-1$ choices for the fourth page. Finally, we 
	have $N-2$ choices for the fifth page since we cannot request the third or 
	fourth page. Thus, there are $N(N-1)(N-2)(N-1)(N-2)$ sequences for which FWF 
	faults on all requests, and for $N=5$ this is $720$.
\end{proof}

\begin{lem}
	There are $540$ sequences for which LRU faults on all $5$ requests.
\end{lem}

\begin{proof}
	We have $N$ and $N-1$ choices for the first and second page, respectively. We 
	have $N-2$ choices for each the third, fourth, and fifth pages to fault, 
	resulting in $N(N-1)(N-2)^3$ sequences for which LRU faults on all requests, 
	and for $N=5$ this is $540$.
\end{proof}

Thus, for any bijection $M : I_n \rightarrow I_n$ there are at least $180$ 
instances $z$ where $\cost(FWF,z) = 5$ but $\cost(LRU,M(z)) \leq 4$. Thus, it is 
not the case that $FWF \leq_n LRU$.

\subsection*{Part C}

We first introduce a new function: let $d(A,z)$ represent the number of pages in 
the cache after an online algorithm $A$ services the request sequence $z$. Note 
that $d(A,z) \leq k$ since there can be at most $k$ pages in the cache. For any 
two lazy online paging algorithms $A$ and $B$, we will show the existence of a 
bijection $M_n : I_n \to I_n$ such that for $z \in I_n$, $\cost(A,z) \leq 
\cost(B,M_n(z))$ and $d(A,z) = d(B,M_n(z))$. This will be done by induction on 
$n$. 

For $n=1$, since the caches start out empty, the first page will always be a 
fault. Thus, $\cost(A,z) = 1 = \cost(B,M_1(z))$ for any bijective mapping $M_1$ 
and any $z \in I_1$. Also, the first page is placed into the cache, so that 
$d(A,z) = 1 = d(B,M_1(z))$ as well. Now, assume inductively that there 
exists a bijection $M_{n-1}$ such that $\cost(A,z) \leq \cost(B, M_{n-1}(z))$ 
and $d(A,z) = d(B,M_{n-1}(z))$ for all $z \in I_{n-1}$.

\begin{lem}
	There is a bijection $M_n$ such that $\cost(A,z) \leq \cost(B, M_n(z))$ and 
	$d(A,z) = d(B,M_n(z))$ for all $z \in I_n$.
\end{lem}

\begin{proof}
	We will use $M_{n-1}$, the bijection for all sequences of length $n-1$, to 
	build the desired $M_n$. Let $y \in I_{n-1}$. We will denote by $ya$ the 
	concatenation of the page request $a$ to the sequence of page requests $y$. 
	Thus, $ya \in I_n$.
	
	Now, we can partition the set $I_1$ into $\kappa(A,y)$ and $\lambda(A,y)$, 
	where $\kappa(A,y)$ is the set of $a \in I_1$ such that $A$ will fault on page 
	request $a$ after serving sequence $y$, and $\lambda$ is the set of $a \in 
	I_1$ such that $A$ will \textbf{not} fault on page request $a$ after serving 
	sequence $y$. Thus, $|\kappa(A,y)| = N - d(A,y)$ and $|\lambda(A,y)| = 
	d(A,y)$. We can define $\kappa(B,M_{n-1}(y))$ and $\lambda(B,M_{n-1}(y))$ 
	similarly.
	
	Recall that $d(A,y) = d(B, M_{n-1}(y))$, and hence $|\kappa(A,y)| = 
	|\kappa(B,M_{n-1}(y))|$ and $|\lambda(A,y)| = |\lambda(B,M_{n-1}(y))|$. Thus, 
	we can create a bijection $B : I_1 \to I_1$ as follows: Let $B_{\kappa}$ be 
	any bijection from $\kappa(A,y)$ to $\kappa(B,M_{n-1}(y))$, and 
	$B_{\lambda}$ be any bijection from $\lambda(A,y)$ to $\lambda(B,M_{n-1}(y))$. 
	Then, for $a \in I_1$, if $a \in \kappa(A,y)$, then $B(a) = B_{\kappa}(a)$, 
	and $B(a) = B_{\lambda}(a)$ otherwise. Now, let $z \in I_n$, and let $y \in 
	I_{n-1}$ and $a \in I_1$ be such that $ya=z$. Then we define the bijection 
	$M_n : I_n \to I_n$ to be such that
	\[ M_n(z) = M_{n-1}(y) B(a). \]
	Since $M_{n-1}$ and $B$ are both bijections, it is clear that $M_n$ is also a 
	bijection. It remains to be shown that $\cost(A,z) \leq \cost(B, M_n(z))$ and 
	$d(A,z) = d(B,M_n(z))$.
	
	By the inductive assumption, $\cost(A,y) \leq \cost(B, M_{n-1}(y))$. Since 
	either $a$ and $B(a)$ both cause a fault in $A$ after reading $y$ (and $B$ 
	after reading $M_{n-1}(y)$, respectively), or $a$ and $B(a)$ both do not cause 
	a fault, then for some $x \in \{0,1\}$, $\cost(A,z) = \cost(A,y) + x$ and 
	$\cost(B,M_n(z)) = \cost(B,M_{n-1}(y)) + x$. In particular, $\cost(A,z) \leq 
	\cost(B,M_n(z))$. For the latter claim, if it is the case that $a$ and $B(a)$ 
	do not cause faults in their respective situations, or if $d(A,y) = k$, then 
	$d(A,z) = d(A,y) = d(B,M_{n-1}(y)) = d(B,M_n(z))$. Otherwise, if $a$ and 
	$B(a)$ both cause faults, then $d(A,z) = d(A,y)+1 = d(B,M_{n-1}(y))+1 = 
	d(B,M_n(z))$. This completes the induction.
\end{proof}

Since we have shown the existence of a bijection $M_n$ for all $n$ such that 
$\cost(A,z) \leq \cost(B,M_n(z))$ for all $z \in I_n$, we can conclude that $A 
\leq_n B$. By simply switching the roles of $A$ and $B$, we can also obtain that 
$B \leq_n A$.

\end{document}
