%%This is a very basic article template.
%%There is just one section and two subsections.
\documentclass{article}

\begin{document}
\title{Homework 5}
\author{Congle Zhang}
\maketitle



\section{Problem 1}
The objective is to maximize the total flow out of all the sources $s_i$, which
is the same as the total flow into the sinks $t_i$.
There are two kinds of constraints: (1) the sum of the flows of all products on
every edge is less than or equal to $c(u,v)$; (2) for every node other than a
source or a sink, and for every product, the flow in equals the flow out.
Whenever there is an edge $(u,v)$ in the graph, we add $p$ variables to the LP,
so there are $p|E|$ variables in all. Therefore, we can write the problem as
the following LP:

$$
\begin{array}{ll}
max: & \sum_{i,v} p_i(s_i,v)  \\
\forall u,v; & \sum_i p_i(u,v) \leq c(u,v)\\
\forall i, u\neq s_i,t_i; & \sum_v p_i(v,u) -\sum_v p_i(u,v) = 0\\
\forall u,v,i; & p_i(u,v)\geq 0\\
\end{array}
$$


\section{Problem 2}
(a) We are to minimize $T$, the total time span. The constraints are: (1) for
every machine, the total time spent on that machine should be at most $T$; (2)
$x_{ij} \in \{0,1\}$; (3) the sum of $x_{ij}$ over machines is 1 for every job.
Thus, we have the 0--1 linear programming problem:

$$
\begin{array}{ll}
min: & T \\
\forall j; & \sum_i x_{ij}p_{ij} \leq T\\
\forall i,j; & x_{ij} \in \{0,1\}\\
\forall i; & \sum_j x_{ij}=1\\
& T \geq 0
\end{array}
$$

(b) To get a linear relaxation on this problem, we have

$$
\begin{array}{ll}
min: & T \\
\forall j; &\sum_i x_{ij}p_{ij} \leq T\\
\forall i,j; & x_{ij}  \leq 1 \\
\forall i; & \sum_j x_{ij}=1\\
\forall i,j; & x_{ij}  \geq 0 \\
& T \geq 0
\end{array}
$$


(c) Translating the LP in (b) into standard form, we have
$$
\begin{array}{ll}
min: & T \\
\forall j; & T- \sum_i x_{ij}p_{ij} \geq 0\\
\forall i,j; & -x_{ij}  \geq -1 \\
\forall i; & \sum_j x_{ij} \geq 1\\
\forall i,j; & x_{ij}  \geq 0 \\
& T \geq 0
\end{array}
$$

There are $nm+1$ variables and $n+m+nm$ constraints in the above LP. So the
dual problem has $n+m+nm$ variables and $nm+1$ constraints. Let the variables
be $u_1,\ldots, u_m$; $v_{ij}$ ($i\in\{1,\ldots, n\}$, $j\in\{1,\ldots, m\}$);
$w_1,\ldots, w_n$; then we are to solve

$$
\begin{array}{ll}
max: & -\sum_{i,j}v_{ij}+\sum_i w_i  \\
 & \sum_j u_j \leq 1\\
\forall i,j; & -p_{ij}u_j -v_{ij}+w_i \leq 0 \\
\forall i,j; & v_{ij},u_j,w_i \geq 0 \\
\end{array}
$$

To avoid confusion: in the above LP, index $i$ ranges over $1,\ldots,n$ and
index $j$ ranges over $1,\ldots,m$.

(d) Since the dual problem and the primal problem have the same OPT value, we
have $OPT_{LP} = OPT_{dual-LP}$.

Any feasible point of the $IP$ is also a feasible point of the $LP$. Thus
$OPT_{LP}$ is at least as good as $OPT_{IP}$; that is, $OPT_{LP} \leq
OPT_{IP}$.

Therefore, we have $OPT_{IP}\geq OPT_{LP} = OPT_{dual-LP}$.


(e) When $n=1$, we have

$$
OPT_{IP} = \min_j \{p_{1j}\}
$$

But for the LP, we can assign $T_0=x_{11}^0 p_{11} = x_{12}^0 p_{12} = \ldots =
x_{1m}^0 p_{1m}$. Together with $\sum_j x_{1j}^0=1$, we have

\[
T_0 = (1/p_{11}+1/p_{12} + \ldots + 1/p_{1m})^{-1}
\]

\textbf{Claim: } $T_0$ is the minimized $T$.

\textbf{Proof: } Suppose $T^*<T_0$; then for all $j$ we have $x_{1j}^* <
x_{1j}^0$. Then $\sum_j x_{1j}^* < \sum_j x_{1j}^0 = 1$, a contradiction.


Therefore, we have $OPT_{LP}=(1/p_{11}+1/p_{12} + \ldots + 1/p_{1m})^{-1}$. To
get the bound $R$, note that $OPT_{IP}=\min_j p_{1j}$ gives $1/p_{1j} \leq
1/OPT_{IP}$ for every $j$; summing over the $m$ machines yields

\[
T_0 \geq (m/OPT_{IP})^{-1} = \frac{1}{m} OPT_{IP}
\]

Thus, we have $R=m$, that is,

\[
OPT_{IP} \leq m \cdot OPT_{LP}
\]

(f) We have $T_j = \sum_i x_{ij} p_{ij}$. By linearity of expectation,

$$
E(T_j) = \sum_i E(x_{ij})\, p_{ij} = \sum_i x^*_{ij} p_{ij}
$$

\section{Problem 3}
Let the weight of every set be $a_j$, $j=1,2,\ldots m$. 


We have $n$ experts, one standing for each of the $n$ elements of $U$. At every
time step $t$, we pick a set $S_{jt}$.

The cost is defined as $m_i^j = 1/a_{jt}$ if $i\in S_{jt}$, and $m_i^j=0$
otherwise.

The weights are defined by $w_i^0=1$, and $w_i^{(t+1)}=0$ if $i \in \{S_{j0}
\cup S_{j1} \ldots \cup S_{jt}\}$, and $w_i^{(t+1)}=1$ otherwise.


At each step, the algorithm chooses the $S_{jt}$ with maximum votes, i.e., the
set maximizing
$$
\sum_i p_i^t m_i^j  =  \sum_i w_i^t m_i^j / \phi(t)
$$
 
where $\phi(t) = \sum_i w_i^t$.

Now we should prove that the greedy algorithm will produce a result no worse
than $\lceil \ln n \rceil OPT$.

The key observation here is $1/OPT \leq \sum_{i \in S_{jt}} p_i^t m_i^j$

\textbf{Claim: } We have $1/OPT \leq \sum_{i \in S_{jt}} p_i^t m_i^j$. 

\textbf{Proof: } Consider the subsets in the optimal solution; let their
weights be $b_1,b_2, \ldots b_\ell$ and their sizes be $x_1,x_2, \ldots x_\ell$.
Without loss of generality, we assume $\forall i$, $x_1/b_1 \geq x_i/b_i$.
Then, we have

$$
\frac{x_1}{\sum_i x_i} \geq \frac{b_1}{\sum_i b_i}
$$

The above inequality can be obtained by a simple calculation. Then we have

$$
\frac{\phi(0)}{OPT} \leq \frac{x_1}{b_1} \leq \sum_{i \in S_{j0}} w_i^0 m_i^j
$$

The rightmost $\leq$ comes from the fact that we pick the set with the largest
$\sum_i w_i^0 m_i^j$ at step 0. Then there is $1/OPT \leq \sum_{i \in S_{j0}}
p_i^0 m_i^j$.

With the same methods, we can prove $\forall t$, $1/OPT \leq \sum_{i \in S_{jt}}
p_i^t m_i^j$. 

Therefore, if we pick $S_{jt}$ at step $t$, we have
$$
\sum_{i \in S_{jt}} w_i^t \geq \frac{\phi(t) a_{jt}}{OPT}
$$

Then we have
$$
\phi(t+1)\leq \phi(t)-\frac{\phi(t)a_{jt}}{OPT} \leq \phi(t)e^{-\frac{a_{jt}}{OPT}}
$$

With the same method as in class, we have $\sum_t a_{jt} \leq \lceil \ln n
\rceil OPT$. The left side is the total weight of the sets picked by the greedy
algorithm.

\end{document}