\section{Mixing Time of Regular Dynamic Graphs} \label{mixing_time}
%\subsection{Definition of (dynamic) mixing time}\label{formal-def-mixing-time}
We have discussed the notion of a random walk, the probability distribution of a random walk, mixing time, etc., on (dynamic) graphs in Section~\ref{sec:rwd}. Here we formally define those notions.
\begin{definition} [Distribution vector]
Let $\pi_x(t)$ define the probability distribution vector reached after $t$ steps when the initial distribution starts with probability $1$ at node $x$. Let $\pi$ denote the stationary distribution vector.
\end{definition}

\begin{definition}[$\tau^x(\epsilon)$ ($\epsilon$-near mixing time for source $x$), $\tau^x_{mix}$ (mixing time for source $x$) and $\tau_{mix}$ (mixing time)]\label{def:mix-static}
Define $\tau^x(\epsilon) = \min \{ t : ||\pi_x(t) - \pi|| < \epsilon \}$. Define $\tau^x_{mix} = \tau^x(1/2e)$. Define $\tau_{mix} = \max_{x} \tau^x_{mix}$. (Here $\epsilon$ is a small constant.)
\end{definition}

We define the {\em dynamic mixing time} of a $d$-regular evolving graph $\mathcal{G} = G_1, G_2, \ldots$ as the maximum time taken for a simple random walk starting from any node to reach {\em close to} the uniform distribution on the vertex set. Therefore the definition of dynamic mixing time is similar to the static case. Let $\tau$ be the maximum mixing time of any (individual) graph $G_t$ in $\mathcal{G}$. (Note that $\tau$ is bounded by $\tilde{O}(n^2)$ --- this follows from Theorem~\ref{thm:mixtime} and Corollary~\ref{cor:second-eigen-bound}.) We show that the dynamic mixing time is well defined due to Theorem~\ref{thm:mixtime} and the monotonicity property of the distribution (cf. Section~\ref{sec:monotonicity}).

\begin{definition}[Dynamic mixing time]\label{def:mix-dynamic}
Define $\tau^x(\epsilon)$ (the $\epsilon$-near mixing time for source $x$) as $\tau^x(\epsilon) = \min \{ t : ||\pi_x(t) - \pi|| < \epsilon \}$. Note that $\pi_x(t)$ is the probability distribution on the graph $G_t$ in the dynamic graph process $\{ G_t : t\geq 1\}$ when the initial distribution ($\pi_x(1)$) starts with probability $1$ at node $x$ on $G_1$. Define $\tau^x_{mix}$ (the mixing time for source $x$) $ = \tau^x(1/2e)$ and $\tau_{mix} = \max_{x} \tau^x_{mix}$. The dynamic mixing time is upper bounded by $\tau = \max \{$mixing time of the static graphs $G_t : t \geq 1\}$. Notice that $\tau \geq \tau_{mix}$ in general.
\end{definition} 

%\subsection{Mixing time}
It is known that a simple random walk on a regular, connected, non-bipartite static graph has mixing time $O(\frac{\log n}{1 - \lambda_2})$, where $\lambda_2$ is the second largest eigenvalue in absolute value of the transition matrix of the graph. Interestingly, it turns out that a similar result holds for $d$-regular, connected, non-bipartite evolving graphs. We show that the mixing time of a simple random walk on a dynamic graph $\mathcal{G} = G_1, G_2, \ldots$ is $O(\frac{\log n}{1 - \lambda})$, where $\lambda$ is an upper bound on the second largest eigenvalue in absolute value of the graphs $\{G_t : t \geq 1\}$. 
\iffalse
\begin{lemma}\label{lem:lemma1}
Let $G$ be an undirected connected non-bipartite $d$-regular graph on $n$ vertices. Let $A_G$ be the transition matrix of $G$. If $\lambda_1 \geq \lambda_2 \geq \ldots \geq \lambda_n$ are the eigenvalues of $A_G$, then $\lambda_1 = 1$ and for $i \geq 2, \lambda_i \leq 1 - \frac{1}{d D n}$, where $D$ is the diameter of the graph.  
\end{lemma}
\begin{proof}
The normalized eigenvector corresponding to $\lambda_1 = 1$ is $X^1 = \frac{1}{\sqrt{n}}(1, 1, \ldots , 1)$. Consider any normalized real eigenvector $X \perp X^1$ with its eigenvalue $\lambda$. Hence, $\sum_{i=1}^n x_{i}^2 = 1$ and $ \sum_{i=1}^n x_i = 0$, where $X = (x_1, x_2, \ldots ,x_n)$. Let $x_t$ and $x_s$ be the respectively largest and smallest co-ordinates of $X$. Clearly $ x_t \geq 1/\sqrt{n}$ and $x_s < 0$. Let $s=v_1 \to v_2 \to \ldots \to v_k =t$ be the vertices on a shortest path from $s$ to $t$ in $G$. Consider $X(\mathbb{I} - A_G)X^T$. Then,
\begin{align*}
1 - \lambda & = X(\mathbb{I} - A_G)X^T = \frac{1}{d} \cdot \sum_{\{i, j\} \in E(G)} (x_i - x_j)^2 \\
& \geq \frac{1}{d} \cdot \sum_{i=1}^{k-1} (x_{v_i} - x_{v_{i+1}})^2 \\
& \geq \frac{1}{d(k - 1)} \cdot \left( \sum_{i = 1}^{k-1} x_{v_i} - x_{v_{i+1}} \right) \\
& [\text{by Cauchy-Schwarz inequality}] \\
& = \frac{1}{d (k-1)} \cdot (x_s - x_t)^2 \\
& \geq \frac{1}{d D n}
\end{align*}
where $k-1 \leq D$, the diameter of the graph.
\end{proof}
\fi

\begin{lemma}\label{lem:lemma2}
Let $G$ be an undirected connected non-bipartite $d$-regular graph on $n$ vertices and $p=(p_1, \ldots ,p_n)$ be any probability distribution on its vertices. Let $A_G$ be the transition matrix of a simple random walk on $G$. Then,
$$ \bigl\Vert pA_G - \frac{\textbf{1}}{n} \bigr\Vert \leq \bar{\lambda_2}\cdot \bigl\Vert p - \frac{\textbf{1}}{n} \bigr\Vert $$ where $\bar{\lambda_2} = \max_{i=2,\ldots,n} \lvert \lambda_i \rvert = \max \{\lambda_2, - \lambda_n\}$ is the second largest eigenvalue in absolute value.
\end{lemma}
\begin{proof}
Let $X_1, X_2, \ldots ,X_n$ be an orthonormal set of eigenvectors of $A_G$ with corresponding eigenvalues $\lambda_1 \geq \lambda_2 \geq \ldots \geq \lambda_n$. Since $A_G$ is a symmetric stochastic matrix, $\lambda_1 = 1$, $X_1 = \frac{\textbf{1}}{\sqrt{n}}$, and all eigenvectors and eigenvalues are real. Clearly, 
$$ \bigl\Vert pA_G - \frac{\textbf{1}}{n} \bigr\Vert = \bigl\Vert pA_G - \frac{\textbf{1}}{n}A_G \bigr\Vert = \bigl\Vert (p - \frac{\textbf{1}}{n}) A_G \bigr\Vert $$
We can write $p$ in this orthonormal eigenbasis as $p = \beta_1X_1 + \beta_2X_2 + \ldots + \beta_n X_n$, where $\beta_1, \beta_2, \ldots ,\beta_n \in \mathbb{R}$. Since $p$ is a probability distribution, $\beta_1 = p\cdot X_1^T = \frac{1}{\sqrt{n}} \sum_{i}p_i = \frac{1}{\sqrt{n}}$, so that $\beta_1 X_1 = (\frac{1}{n}, \frac{1}{n}, \ldots ,\frac{1}{n}) $. Therefore, $p - \frac{\textbf{1}}{n} = \sum_{i=2}^n \beta_i X_i $. Hence, $$ \bigl\Vert p - \frac{\textbf{1}}{n} \bigr\Vert = \sqrt{\sum_{i=2}^n \beta_i^2} $$
Furthermore, 
\begin{align*} \bigl\Vert (p - \frac{\textbf{1}}{n}) A_G \bigr\Vert & = \bigl\Vert \sum_{i=2}^n \beta_i X_i A_G \bigr\Vert \\
& = \bigl\Vert \sum_{i=2}^n \lambda_i \beta_i X_i \bigr\Vert \\ 
& = \sqrt{\sum_{i=2}^n \lambda_i^2 \beta_i^2} \\
& \leq \max_{i=2,\ldots,n} \vert \lambda_i \vert \cdot \sqrt{\sum_{i=2}^n \beta_i^2} \\
& = \bar{\lambda_2}\cdot \bigl\Vert p - \frac{\textbf{1}}{n} \bigr\Vert 
%& \leq \left(1 - \frac{1}{d D n} \right) \sqrt{\sum_{i=2}^n \beta_i^2} && [\text{By the Lemma~\ref{lem:lemma1}]} \\
%& = \left(1 - \frac{1}{d D n} \right) \left\Vert p - \frac{\textbf{1}}{n} \right\Vert 
\end{align*}
Thus, $$\bigl\Vert pA_G - \frac{\textbf{1}}{n} \bigr\Vert \leq \bar{\lambda_2}\cdot \bigl\Vert p - \frac{\textbf{1}}{n} \bigr\Vert  $$
\end{proof}

%Therefore the above lemma becomes: 
%\begin{lemma}\label{lem:lemma3}
%Let $G$ be an undirected connected $d$-regular graph on $n$ vertices and $p=(p_1, \ldots ,p_n)$ be a probability distribution on its vertices. Let $A_G$ be the transition matrix of a simple random walk on $G$. Then,
%$$ \left\Vert pA_G - \frac{\textbf{1}}{n} \right\Vert \leq \left( 1 - \frac{1}{n^2} \right) \left\Vert p - \frac{\textbf{1}}{n} \right\Vert $$
%\end{lemma}

An immediate corollary follows from the previous lemma: 
\begin{corollary}\label{cor:cor1}
Let $\mathcal{G} = G_1, G_2, \ldots $ be a sequence of undirected connected non-bipartite $d$-regular graphs on the same vertex set $V$. If $p_0$ is the initial probability distribution on $V$ and we perform a simple random walk on $\mathcal{G}$ starting from $p_0$, then the probability distribution $p_t$ of the walk after $t$ steps satisfies, 
$$\bigl\Vert p_t - \frac{\textbf{1}}{n} \bigr\Vert \leq {\lambda}^t \bigl\Vert p_0 - \frac{\textbf{1}}{n} \bigr\Vert $$ where $\lambda$ is an upper bound on the second largest eigenvalue in absolute value of the graphs $\{G_t : t \geq 1\}$. 
\end{corollary}

\begin{theorem}\label{thm:mixtime}
For any $d$-regular connected non-bipartite evolving graph $\mathcal{G}$, the dynamic mixing time of a simple random walk on $\mathcal{G}$ is bounded by $O(\frac{1}{1- {\lambda}}\log n)$, where $\lambda$ is an upper bound on the second largest eigenvalue in absolute value of any graph in $\mathcal{G}$.
\end{theorem}
\begin{proof}
Let the random walk start from a given vertex with initial distribution $p_0 = (1, 0, \ldots, 0)$. From Corollary~\ref{cor:cor1} and the fact that $\bigl\Vert p_0 - \frac{\textbf{1}}{n} \bigr\Vert = O(1)$ we have,
$$\bigl\Vert p_t - \frac{\textbf{1}}{n} \bigr\Vert \leq {\lambda}^t $$
So choosing $t = O(\frac{1}{1 - {\lambda}} \log n)$ gives $\bigl\Vert p_t - \frac{\textbf{1}}{n} \bigr\Vert \leq \frac{1}{n^{O(1)}}$.  
\end{proof}

\begin{lemma}\label{lem:eigen-bound}
Let $G$ be an undirected connected $d$-regular graph on $n$ vertices. Let $A_G$ be the transition matrix of $G$. If $\lambda_1 \geq \lambda_2 \geq \ldots \geq \lambda_n$ are the eigenvalues of $A_G$, then $\lambda_1 = 1$ and for $i \geq 2, \lambda_i \leq 1 - \frac{1}{d D n}$, where $D$ is the diameter of the graph.  
\end{lemma}
\begin{proof}
The normalized eigenvector corresponding to $\lambda_1 = 1$ is $X_1 = \frac{1}{\sqrt{n}}(1, 1, \ldots , 1)$. Consider any normalized real eigenvector $X \perp X_1$ with its eigenvalue $\lambda$. Hence, $\sum_{i=1}^n x_{i}^2 = 1$ and $ \sum_{i=1}^n x_i = 0$, where $X = (x_1, x_2, \ldots ,x_n)$. Let $x_t$ and $x_s$ be the largest and smallest coordinates of $X$, respectively. Clearly $ x_t \geq 1/\sqrt{n}$ and $x_s < 0$. Let $s=v_1 \to v_2 \to \ldots \to v_k =t$ be the vertices on a shortest path from $s$ to $t$ in $G$. Consider $X(\mathbb{I} - A_G)X^T$. Then,
\begin{align*}
1 - \lambda & = X(\mathbb{I} - A_G)X^T = \frac{1}{d} \cdot \sum_{\{i, j\} \in E(G)} (x_i - x_j)^2 \\
& \geq \frac{1}{d} \cdot \sum_{i=1}^{k-1} (x_{v_i} - x_{v_{i+1}})^2 \\
& \geq \frac{1}{d(k - 1)} \cdot \left( \sum_{i = 1}^{k-1} (x_{v_i} - x_{v_{i+1}}) \right)^2 \\
& [\text{by Cauchy-Schwarz inequality}] \\
& = \frac{1}{d (k-1)} \cdot (x_s - x_t)^2 \\
& \geq \frac{1}{d D n}
\end{align*}
where $k-1 \leq D$, the diameter of the graph.
\end{proof}

\begin{corollary}\label{cor:second-eigen-bound}
$(1- \frac{1}{n^2})$ is an upper bound on the second largest eigenvalue $\bar{\lambda_2}$ of the transition matrix of any undirected connected regular graph on $n$ vertices. 
\end{corollary}
\begin{proof}
This follows from Lemma~\ref{lem:eigen-bound} and the fact that the diameter of any connected $d$-regular graph is bounded by $O(\frac{n}{d})$. 
\end{proof}

\subsection{Monotonicity property of the distribution vector}\label{sec:monotonicity}
Let $\pi_x(t)$ define the probability distribution vector of a simple random walk reached after $t$ steps when the initial distribution starts with probability $1$ at node $x$. Let $\pi$ denote the stationary distribution vector. We show in the following lemma that the vector $\pi_x(t)$ gets closer to $\pi$ as $t$ increases. 
\begin{lemma} %[\textbf{Monotonicity}]
\label{lem:monotonicity}
$||\pi_x(t+1) - \pi|| \leq  ||\pi_x(t) - \pi||$.
\end{lemma}
\begin{proof}
We need to show that the definitions of mixing times are consistent, i.e., monotonic in $t$, the length of the random walk. Let $A$ be the transition matrix of a simple random walk on a $d$-regular evolving graph $\mathcal{G}$, which in fact changes from round to round. The entry $a_{ij}$ of $A$ denotes the probability of transitioning from node $i$ to node $j$. The monotonicity follows from the fact that for any transition matrix $A$ of any regular graph and for any probability distribution vector $p$, $$||(p - \frac{\mathbf{1}}{n}) A|| < ||p - \frac{\mathbf{1}}{n}||.$$ This result follows from the above Lemma~\ref{lem:lemma2} and the fact that $\bar{\lambda_2} < 1$.

Let $\pi$ be the stationary distribution of the matrix $A$. Then $\pi = (\frac{1}{n}, \frac{1}{n}, \ldots, \frac{1}{n})$. This implies that if $t$ is $\epsilon$-near mixing time, then $||p A^t - \pi|| \leq \epsilon$, by definition of $\epsilon$-near mixing time. Now consider $||p A^{t+1} - \pi||$. This is equal to $||p A^{t+1} - \pi A||$ since $\pi A = \pi$.  However, this reduces to $||(p A^{t} - \pi) A|| < ||p A^t - \pi|| \leq \epsilon$. It follows that $(t+1)$ is $\epsilon$-near mixing time and $||p A^{t+1} - \pi|| < ||p A^t - \pi||$.
\end{proof}