% $Header: /cvsroot/latex-beamer/latex-beamer/solutions/conference-talks/conference-ornate-20min.en.tex,v 1.7 2007/01/28 20:48:23 tantau Exp $

\documentclass{beamer}

\mode<presentation>
{
  %\usetheme{Warsaw}
  \usetheme{Antibes}
  \setbeamercovered{transparent}
  % or whatever (possibly just delete it)
}

\usepackage[english]{babel}
% Source is ASCII-only, so switching the legacy latin1 option to utf8 is safe
% (utf8 has been the default LaTeX input encoding since 2018).
\usepackage[utf8]{inputenc}
\usepackage{times}  % NOTE(review): obsolete per l2tabu; newtx is the modern
                    % replacement, kept here to avoid changing document fonts.
\usepackage[T1]{fontenc}
\usepackage{tikz}
\usepackage{amssymb, amsmath}

\setbeamertemplate{footline}[frame number]    % add slide number

%\usepackage{pdflibraryarrows}
% Or whatever. Note that the encoding and the font should match. If T1
% does not look nice, try deleting the line with the fontenc.

\title[Enabling and Controlling Diffusion Processes in Networks] % (optional, use only with long paper titles)
{Enabling and Controlling Diffusion Processes in Networks}

%\subtitle
%{Ph.D. Thesis Proposal}

\author[] % (optional, use only with lots of authors)
{Zhifeng~Sun}
% - Give the names in the same order as the appear in the paper.
% - Use the \inst{?} command only if the authors have different
%   affiliation.

\institute[Northeastern University] % (optional, but mostly needed)
{
  %\inst{1}%
  Department of Computer Science\\
  Northeastern University
}

\date[April 13, 2012] % (optional, should be abbreviation of conference name)
{April 13, 2012}
% - Either use conference name or its abbreviation.
% - Not really informative to the audience, more for people (including
%   yourself) who are reading the slides online

\subject{Theoretical Computer Science}
% This is only inserted into the PDF information catalog. Can be left
% out. 

% Delete this, if you do not want the table of contents to pop up at
% the beginning of each subsection:
\AtBeginSubsection[]
{
  \begin{frame}<beamer>{Outline}
    \tableofcontents[currentsection,currentsubsection]
  \end{frame}
}


% If you wish to uncover everything in a step-wise fashion, uncomment
% the following command: 

%\beamerdefaultoverlayspecification{<+->}

\begin{document}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% macros
\newcommand{\junk}[1]{}                 % discard argument; used to comment out whole frames
\newcommand{\rb}[1]{\left( #1 \right)} %Round
\newcommand{\abs}[1]{\left| #1 \right|} %| |
%\newcommand{\md}[1]{\delta_{#1}}                            % min degree
%\newcommand{\gf}[1]{G_{#1}}                                     % graph
%\newcommand{\nei}[3]{N^{#2}_{#1}\rb{#3}}                % neighbor
%\newcommand{\dg}[2]{d_{#1}\rb{#2}}                          % degree
%\newcommand{\dgi}[3]{d_{#1}\rb{#2,#3}}                   % induced degree
%\newcommand{\prb}[2]{p_{#1,#2}}
%\newcommand{\prob}[1]{\Pr\left[ #1 \right]}

% Simplified notation: the time index (argument #1) is accepted but not shown.
\newcommand{\md}[1]{\delta}                            % min degree
\newcommand{\gf}[1]{G}                                     % graph
\newcommand{\nei}[3]{N^{#2}\rb{#3}}                % i-hop neighborhood N^i(u)
% \operatorname (amsmath, loaded above) gives correct upright operator font
% and spacing in math mode, unlike \mbox which inherits the text font.
\newcommand{\dg}[2]{\operatorname{degree}\rb{#2}}                          % degree
\newcommand{\dgi}[3]{\operatorname{degree}\rb{#2,#3}}                   % induced degree
\newcommand{\prb}[2]{p_{#1,#2}}
\newcommand{\prob}[1]{\Pr\left[ #1 \right]}

\begin{frame}
  \titlepage
\end{frame}

\junk{
\begin{frame}{Outline}
  \tableofcontents
  % You might wish to add the option [pausesections]
\end{frame}
}


%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Introduction}

\begin{frame}{Diffusion process}
\begin{itemize}
\item Diffusion is the spread of information or some flow in a network
  through local transmissions.
\item Positive diffusion:
  \begin{itemize}
  \item Diffuse useful information (e.g. innovations, ideas).
  \item Analyze the converging time of diffusion processes.
  \item Design efficient algorithms for fast diffusion.
  \end{itemize}
\item Harmful/Negative diffusion:
  \begin{itemize}
  \item Diffuse harmful information (e.g. diseases, viruses).
  \item Analyze the converging time and the extent of diffusion
    processes.
  \item Design good intervention strategies.
  \end{itemize}
\end{itemize}
\end{frame}

\begin{frame}{Motivation}
\begin{columns}
  \column{0.5\textwidth}
  %\begin{exampleblock}{Human contact network}
    \begin{figure}
      \includegraphics[width=\textwidth]{fig/human.jpg}
    \end{figure}
  %\end{exampleblock}
 \begin{itemize}
    \item Innovations, ideas, gossip.
   \item Diseases.
    \item Friendship.
  \end{itemize}

  \column{0.5\textwidth}
  %\begin{exampleblock}{Computer network}
    \begin{figure}
      \includegraphics[width=.9\textwidth]{fig/netwk.jpg}
    \end{figure}
  %\end{exampleblock}
  \begin{itemize}
    \item Resource discovery.
    \item Computer viruses.
    \item Also sensor networks, mobile networks, etc.
  \end{itemize}
\end{columns}
\end{frame}

\begin{frame}{Thesis concentration}
\begin{itemize}
\item Enable positive diffusion.
  \begin{itemize}
  \item Resource discovery through gossip.
  \item Information spreading under adversarial dynamics.
  \end{itemize}
\item Control negative diffusion.
  \begin{itemize}
  \item What is the optimal centralized intervention strategy for a
    given contact network?
  \item How effective are interventions of individual choices and
    behaviors?
    \begin{itemize}
    \item Individuals make their own intervention strategies.
    \item Individuals exhibit risk behavior changes.
    \end{itemize}
  \end{itemize}
\end{itemize}
\end{frame}

\begin{frame}{Outline}
  \tableofcontents
  % You might wish to add the option [pausesections]
\end{frame}


%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Resource discovery through gossip}
\subsection{Definition and motivation for discovery processes}

\begin{frame}{Triangulation process (Push discovery)}
\begin{columns}
  \column{0.5\textwidth}
  \begin{itemize}
  \item In each round, each node chooses two random neighbors and
    connects them by ``pushing'' their mutual information to each
    other.
  \end{itemize}

  \column{0.5\textwidth}
  \begin{figure}
    \includegraphics[width=\textwidth]{fig/triangulation.jpg}
  \end{figure}
\end{columns}
\end{frame}

\begin{frame}{Two-hop walk process (Pull discovery)}
\begin{columns}
  \column{0.5\textwidth}
  \begin{itemize}
  \item In each round, each node connects itself to a random neighbor
    of a neighbor chosen uniformly at random, by ``pulling'' a random
    neighboring ID from a random neighbor.
  \end{itemize}

  \column{0.5\textwidth}
  \begin{figure}
    \includegraphics[width=\textwidth]{fig/twohopwalk.jpg}
  \end{figure}
\end{columns}
\end{frame}

\begin{frame}{Applications - Resource discovery}
\begin{columns}
  \column{0.5\textwidth}
  \begin{itemize}
  \item In P2P network, nodes can communicate with known IP addresses.
  \item Efficient algorithms to discover all IP addresses in the network.
%  \item Resource Discovery.
  \end{itemize}

  \column{0.5\textwidth}
  \begin{figure}
    \includegraphics[width=\textwidth]{fig/rd-problem.jpg}
  \end{figure}
\end{columns}
\end{frame}

\begin{frame}{Application - Group member discovery}
\begin{columns}
  \column{0.5\textwidth}
  \begin{itemize}
  \item In a social network, people in the same group get to know each
    other by
    \begin{itemize}
    \item being introduced by their mutual friends within the group
    \item reaching out to the friends of their friends within the group
    \end{itemize}
%  \item Subgroup participating the process
  \end{itemize}

  \column{0.5\textwidth}
  \begin{figure}
    \includegraphics[width=\textwidth]{fig/friendship.jpg}
  \end{figure}

\end{columns}
\end{frame}

\begin{frame}{Goal and Challenges}
\begin{itemize}
\item Goal is to let every node discover all other nodes in the
  network.
\item Trivial lower bound $\Omega(n)$.
\item Potential function argument failed due to \alert{non-monotonicity}
  \begin{itemize}
  \item If $G$ is a subgraph of $H$, the process may complete faster in
    $G$ than in $H$.
  \item Line vs.\ dumbbell in the two-hop walk process.
  \item Counter-intuitive but not surprising, since random walks
    exhibit similar non-monotonicity property.
  \end{itemize}
\end{itemize}
\end{frame}

\begin{frame}{Converging time of triangulation process}
\begin{figure}
  \includegraphics[width=0.8\textwidth]{fig/triangle.jpg}
\end{figure}
\end{frame}

\begin{frame}{Converging time of two-hop walk process}
\begin{figure}
  \includegraphics[width=0.8\textwidth]{fig/randwalk.jpg}
\end{figure}
\end{frame}


\junk{%%%%%%%%%%%%
\begin{frame}{Related work}
\begin{itemize}
\item \mbox{[Harchol-balter et al 1999]} studied this process with
  message size $\Omega(n)$, and showed an $O(\log^2 n)$ bound.
\item \mbox{[Law-Siu 2000]} gave an $O(\log n)$ randomized algorithm
  for resource discovery where the message size is $\Omega(n)$.
\item \mbox{[Kutten-Peleg-Vishkin 2003]} proposed a deterministic
  algorithm which solves resource discovery in $O(\log n)$ time but
  the message size is still $\Omega(n)$.
\item \mbox{[Kutten-Peleg 2002] and [Abraham-Dolev 2006]} studied
  asynchronous resource discovery.
\end{itemize}
\end{frame}
}%%%%%%%%%%%

\junk{%%%%%%%
\begin{frame}{Problem formulation}
\begin{itemize}
\item Design processes to transform any graph to a complete graph.
\end{itemize}

\begin{figure}
  \includegraphics[width=\textwidth]{fig/rd-beginend.jpg}
\end{figure}
\end{frame}
}%%%%%%%

\begin{frame}{Results}
\begin{itemize}
  \item Undirected graphs:
    \begin{itemize}
    \item We show both push and pull discovery processes converge in
      $O(n \log^2 n)$ rounds for any undirected $n$-node graph with
      high probability.
    \item We show $\Omega(n\log n)$ is a lower bound on the number of
      rounds needed for almost any $n$-node graph.
    \end{itemize}
  \item Directed graphs:
    \begin{itemize}
    \item We show pull process takes $O(n^2 \log n)$ rounds for any
      $n$-node graph with high probability.
    \item We show an $\Omega(n^2 \log n)$ lower bound for weakly
      connected graphs, and $\Omega(n^2)$ lower bound for strongly
      connected graphs.
    \end{itemize}
  \item Results published in SPAA 2012. Joint work with Haeupler,
    Pandurangan, Peleg, and Rajaraman.
\end{itemize}
\end{frame}

\subsection{Upper bound proof for one discovery process}

\begin{frame}
\begin{theorem}
Two-hop walk process completes in $O(n\log^2 n)$ rounds with high
probability.
\end{theorem}
\begin{itemize}
\item We show the \alert{minimum degree increases geometrically} every
  $O(n\log n)$ rounds with high probability.
\item $\md{t}$: minimum degree of graph $\gf{t}$.
\item $\nei{t}{i}{u}$: set of nodes that are at \alert{distance $i$}
  from $u$ in $\gf{t}$.
\item $| \nei{t}{i}{u} |$: number of nodes in $\nei{t}{i}{u}$.
\item $\dg{t}{u}$: degree of node $u$ in $\gf{t}$.
\item $\dgi{t}{u}{S}$: number of edges from $u$ to nodes in $S$, i.e.,
  degree induced on $S$.
\end{itemize}
\end{frame}

\begin{frame}{Notation example}
\begin{columns}
  \column{0.7\textwidth}
  \begin{itemize}
  \item $\md{t} = 1$.
  \item $\nei{t}{1}{u} = $\{\textcolor{green}{green nodes}\}. $|
    \nei{t}{1}{u} |=2$.
  \item $\nei{t}{2}{u} = $\{\textcolor{red}{red nodes}\}. $|
    \nei{t}{2}{u} | = 3$.
  \item $\nei{t}{3}{u} = $\{\textcolor{yellow}{yellow nodes}\}. $|
    \nei{t}{3}{u} | = 2$.
  \item $\nei{t}{4}{u} = $\{\textcolor{blue}{blue nodes}\}. $|
    \nei{t}{4}{u} | = 1$.
  \item $\dg{t}{u} = 2$.
%  \item $\dgi{t}{u}{\nei{t}{2}{u}} = 0$, $\dgi{2t}{u}{\nei{t}{2}{u}} = 2$
  \end{itemize}

  \column{0.3\textwidth}
  \begin{figure}
    \includegraphics[width=\textwidth]{fig/rd-notation.jpg}
  \end{figure}
\end{columns}
\end{frame}

\junk{%%%%%%%%%%%%
\begin{frame}{High level proof structure}
\begin{itemize}
\item We show the minimum degree of the graph grows exponentially.
  \begin{itemize}
  \item Case 1: when the two-hop neighborhood, $\nei{t}{2}{u}$, is not
    too large, there exists $T=O(n\log n)$ such that $\dg{T}{u} \ge
    \min\{2\md{0},n-1\}$.
  \item Case 2: when the two-hop neighborhood is not too small, there
    exists $T=O(n\log n)$ such that $\dg{T}{u}\ge \min\{(1+1/8)\md{0},
    n-1\}$.
  \end{itemize}
\item Iterate $O(\log n)$ times, the graph will become complete graph.
\end{itemize}
\end{frame}
}%%%%%%%%%%%%


\begin{frame}{High level proof structure}
\begin{figure}
  \includegraphics[width=\textwidth]{fig/triangulation-highlevelproof.jpg}
\end{figure}
\end{frame}

\begin{frame}
\begin{itemize}
\item Weakly tied: $u$ is weakly tied to set of nodes $S$ if
  $\dgi{t}{u}{S}<\md{0}/4$.
\item Strongly tied: $u$ is strongly tied to set of nodes $S$ if
  $\dgi{t}{u}{S}\ge \md{0}/4$.
\end{itemize}

\begin{figure}
  \includegraphics[width=\textwidth]{fig/weak-strong.jpg}
\end{figure}
\end{frame}

\begin{frame}{Two-hop neighborhood is not too large}
\begin{lemma}
There exists $T=O(n\log n)$ such that either $| \nei{T}{2}{u} | \ge
\md{0}/2$ or $\dg{T}{u}\ge \min\{2\md{0}, n-1\}$ with probability at
least $1-1/n^2$.
\end{lemma}
\begin{itemize}
\item If $|\nei{t}{2}{u}|\ge \md{0}/2$ where $t<cn\log n$, then lemma
  holds.
\item Focus on case where $|\nei{t}{2}{u}|< \md{0}/2$ for $t<cn\log
  n$.
  \begin{itemize}
  \item Any node $v\in \nei{0}{2}{u}$ will be strongly tied to
    $\nei{t}{1}{u}$ where $t=O(n\log n)$.
  \item $v$ will connect to $u$ in $O(n\log n)$ rounds.
  \item $\nei{0}{2}{u}$ will connect to $u$ at $t_1 = O(n\log n)$.
  \item Repeat.
  \end{itemize}
\end{itemize}
\end{frame}

\begin{frame}
\begin{figure}
  \includegraphics[width=\textwidth]{fig/rd-lemma1-high.jpg}
\end{figure}
\end{frame}

\begin{frame}
\begin{columns}
  \column{0.6\textwidth}
  \begin{itemize}
  \item $v$ is weakly tied to $\nei{t}{1}{u}$.
  \item $\dg{0}{w}\ge \md{0}$, $|\nei{t}{2}{u}|<\md{0}/2$, thus
    $\dgi{t}{w}{\nei{0}{1}{u}} \ge \md{0}/2$.
  \item \junk{$\md{0}\le \dg{t}{u}\le 2\md{0}$,} thus prob. $v$ connects a
    node in $\nei{0}{1}{u}$ through $w_0$ in one round is $\ge
    1/n\cdot \alert{1/4}$.
  \item At time $X_1$, $v$ connects to $w_1$.
  \item Prob. $v$ connects a node in $\nei{X_1}{1}{u}$ through $w_0$
    or $w_1$ is $\ge \alert{2} \cdot 1/n\cdot 1/4$.
  \end{itemize}

  \column{0.5\textwidth}
  \begin{figure}
    \includegraphics[width=\textwidth]{fig/rd-lemma1-1.jpg}
  \end{figure}
\end{columns}
\end{frame}

\begin{frame}
\begin{itemize}
\item $X_1+X_2+\dots+X_{\md{0}/4} \le 16n\ln n$ with probability at
  least $1-1/n^3$.
\end{itemize}

\begin{columns}
  \column{0.5\textwidth}
  \begin{itemize}
  \item $v$ is strongly tied to $\nei{t}{1}{u}$.
  \item Prob. $u$ connects to $v$ in a single round is $\ge
    (\md{0}/4)/(2\md{0})\cdot 1/n = 1/(8n)$.
  \item $u$ connects to $v$ in $24n\ln n$ rounds with probability at
    least $1-1/n^3$.
  \end{itemize}

  \column{0.5\textwidth}
  \begin{figure}
    \includegraphics[width=\textwidth]{fig/rd-lemma1-2.jpg}
  \end{figure}
\end{columns}
\end{frame}

\begin{frame}
\begin{columns}
  \column{0.6\textwidth}
  \begin{itemize}
  \item $|\cup_{i=1}^4 \nei{t}{i}{u}| \ge \min\{2\md{t}, n-1\}$
  \item Probability that $u$ connects to $\cup_{i=1}^4 \nei{0}{i}{u}$
    in $T=O(n\log n)$ rounds is at least $1-|\cup_{i=1}^4
    \nei{0}{i}{u}|/n^3 \ge 1-1/n^2$.
  \item This completes the proof of the lemma.
  \end{itemize}

  \column{0.5\textwidth}
  \begin{figure}
    \includegraphics[width=\textwidth]{fig/rd-lemma1-3.jpg}
  \end{figure}
\end{columns}
\end{frame}

\junk{%%%%%%%%%%%%%%%
\begin{frame}{Two-hop neighborhood is not too small}
\begin{lemma}
There exists $T=O(n\log n)$ such that either $| \nei{T}{2}{u} | <
\md{0}/4$ or $\dg{T}{u}\ge \min\{(1+1/8)\md{0}, n-1\}$ with
probability at least $1-1/n^2$.
\end{lemma}

\begin{itemize}
\item If $|\nei{t}{2}{u}|<\md{0}/4$ where $t<cn\log n$, then the lemma
  holds.
\item Focus on the case where $|\nei{t}{2}{u}|\ge\md{0}/4$ for
  $t<cn\log n$.
\item If $v\in \nei{0}{2}{u}$ is strongly tied to $\nei{0}{2}{u}$,
  then $u$ connects to $v$ in $O(n\log n)$ rounds with high
  probability.
\item If there are at least $\md{0}/8$ strongly tied nodes in
  $\nei{0}{2}{u}$, then lemma holds.
\item Focus on the case where less than $\md{0}/8$ nodes in
  $\nei{0}{2}{u}$ are strongly tied.
\end{itemize}
\end{frame}

\begin{frame}
\begin{itemize}
\item The number of weakly tied nodes in $\nei{0}{2}{u}$ is at least
  $\md{0}/8$.
\item If $v$ is weakly tied to $\nei{t}{1}{u}$, then $v$ has more than
  $3\md{0}/4$ edges to $\nei{0}{2}{u}\cup \nei{0}{3}{u}$.
\end{itemize}

\begin{figure}
  \includegraphics[width=.8\textwidth]{fig/rd-lemma2-1.jpg}
\end{figure}
\end{frame}

\begin{frame}
\begin{columns}
  \column{0.7\textwidth}
  \begin{itemize}
  \item Let $W_t$ be the set of nodes in $\nei{t}{2}{u}$ that are
    weakly tied to $\nei{t}{1}{u}$. $|W_0|\ge \md{0}/8$.
  \item Define a length-2 path from $u$ to a node two hops away as an
    \alert{out-path}.
  \item Let $P_t$ be the set of out-path at time
    $t$. $|P_0|\ge\md{0}/8$.
  \end{itemize}

  \column{0.4\textwidth}
  \begin{figure}
    \includegraphics[width=\textwidth]{fig/outpath.jpg}
  \end{figure}
\end{columns}
\end{frame}

\begin{frame}
\begin{columns}
  \column{0.6\textwidth}
  \begin{itemize}
  \item Probability that $u$ picks an out-path in $P_t$ is 
  \end{itemize}
  \begin{eqnarray*}
    \sum_{v\in\nei{t}{1}{u}}
    \frac{1}{\dg{t}{u}}\cdot\frac{\dgi{t}{v}{\nei{t}{2}{u}}}{\dg{t}{v}} \\
    \ge \sum_{v\in\nei{t}{1}{u}} \frac{1}{\dg{t}{u}}\cdot
    \frac{\dgi{t}{v}{\nei{t}{2}{u}}}{n-1}  \\
    \ge \frac{\sum_{v\in \nei{t}{1}{u}} \dgi{t}{v}{\nei{t}{2}{u}}}{(1+1/8)\md{0}(n-1)} \\
    \ge \frac{\alert{|P_t|}}{(1+1/8)\md{0}(n-1)}\ge\frac{1}{9n}.
  \end{eqnarray*}

  \column{0.5\textwidth}
  \begin{figure}
    \includegraphics[width=\textwidth]{fig/rd-lemma2-2.jpg}
  \end{figure}
\end{columns}
\end{frame}

\begin{frame}
\begin{itemize}
\item Old lower bound on $|P_t|$ before adding $v_1$ is $\md{0}/8$.
\item After including a weakly tied node, $v_1$, to $\nei{t}{1}{u}$,
  what's the new lower bound on $|P_t|$?
\item $\md{0}/8 - 1 + 3\md{0}/4 - \md{0}/4 - \md{0}/8 \ge \md{0}/8 +
  \md{0}/4$.
\item After including $v_2$, lower bound is $\md{0}/8 + 2\cdot
  \md{0}/4$, etc.
\item Let $X_i$ be the round in which $v_i$ is added. Then
  $X_1+X_2+\dots+X_{\md{0}/8}=O(n\log n)$ with probability at least
  $1-1/n^3$.
\end{itemize}

\begin{figure}
    \includegraphics[width=.8\textwidth]{fig/rd-lemma2-3.jpg}
\end{figure}
\end{frame}

\begin{frame}
\begin{theorem}
For connected undirected graphs, the two-hop walk process completes in
$O(n\log^2 n)$ rounds with high probability.
\end{theorem}
\begin{itemize}
\item We show in $T=O(n\log n)$ rounds, minimum degree of the graph
  increases by a factor of 1/8,
  i.e. $\md{T}\ge\min\{(1+1/8)\md{0},n-1\}$.
\item Use 2 overlapping cases ($|\nei{t}{2}{u}|<\md{0}/2$ and
  $|\nei{t}{2}{u}|\ge\md{0}/4$) to handle case switching.
  \begin{itemize}
  \item If $|\nei{t}{2}{u}|<\md{0}/2$ for $O(n\log n)$ rounds, the
    theorem holds by the first lemma.
  \item If $|\nei{t}{2}{u}|\ge \md{0}/2$ at any time, the theorem holds
    by the second lemma.
  \end{itemize}
\item Apply the above argument $O(\log n)$ times. We obtain the
  $O(n\log^2 n)$ upper bound.
\end{itemize}
\end{frame}
}%%%%%%%%%%%%%%


%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Information spreading under adversarial dynamics}
\subsection{Definition and motivation for information spreading}

\begin{frame}{Problem definition}
\begin{itemize}
\item {\bf $k$-gossip}: $k$ different tokens are assigned to a set $V$
  of $n\ge k$ nodes, where each node may have any subset of the
  tokens, and the goal is to disseminate all the $k$ tokens to all
  nodes.
\item How many rounds needed to complete $k$-gossip?
\item In static network, it can be solved in $O(n+k)$ rounds using, for
  example, a spanning tree.
\item What happens in adversarial networks?
\end{itemize}
\end{frame}

\begin{frame}{Models}
\begin{itemize}
\item \alert{Online model}: adversarial model of [Kuhn-Lynch-Oshman
  2010]
  \begin{itemize}
  \item Nodes communicate using anonymous {\bf broadcast}
  \item At the beginning of round $r$, each node decides what token to
    send based on its internal state (and coin tosses)
  \item Adversary knows everyone's state, and chooses the set of edges
    that forms a connected communication network $G_r$ for round $r$.
  \end{itemize}
\item \alert{Strong adversary}: adversary also knows the coin tosses
  outcomes in current round, but not of future rounds.
\item \alert{Token-forwarding algorithms}: doesn't combine or alter
  tokens, only stores and forwards them.
\end{itemize}
\end{frame}


\begin{frame}{Related work}
\begin{itemize}
\item \mbox{[Kuhn et al 2010]} shows $k$-gossip can be solved in
  $O(kn)$ rounds, and gives an $\Omega(n\log k)$ lower bound for {\bf
  token-forwarding algorithms} under a {\bf strong adversary}.
\item \mbox{[Haeupler-Karger 2011]} studied $k$-gossip using network
  coding, improving the running time to $O(n)$ but requiring message
  size to be $\Omega(n\log n)$ bits.
\item Other research on $k$-gossip problem:
  \begin{itemize}
  \item \mbox{[Karp-Schindelhauer-Shenker-V\"{o}cking 2000]} introduced
    pull and push models.
  \item \mbox{[Boyd-Ghosh-Prabhakar-Shah 2006]} studied randomized
    gossip algorithms.
  \item \mbox{[Mosk-Aoyama-Shah 2006]} studied how to compute separable
    functions via gossip.
  \end{itemize}
\end{itemize}
\end{frame}

\begin{frame}{Results}
\begin{itemize}
\item {\bf Online lower bound}: improve \mbox{[Kuhn et al 2010]} lower
  bound to $\Omega(nk/\log n)$.
  \begin{itemize}
  \item Implications: almost \alert{linear separation} between network
    coding and token-forwarding algorithms.
  \item To beat the lower bound, either relax the adversary model, or
    design non-token-forwarding based algorithms.
  \end{itemize}
\item {\bf Offline adversary}: adversary lays out the sequence of
  communication graphs ahead of time.
  \begin{itemize}
  \item $O(n\sqrt{k\log n})$ round algorithm.
  \item Bicriteria $(O(n^\epsilon),\log n)$ approximation algorithm
  \end{itemize}
\item Results under review, ICALP 2012. Joint work with Dutta,
  Pandurangan, Rajaraman.
\end{itemize}
\end{frame}

\subsection{Limitations of forwarding algorithms: a lower bound proof}
\begin{frame}
\begin{itemize}
\item An initial distribution of $k$ tokens.
\item Each round, algorithm decides which token to broadcast for each
  node.
\item Adversary decides the communication network each round.
\item Goal is to show the number of rounds needed is $\Omega(kn/\log
  n)$.
\end{itemize}
\end{frame}

\begin{frame}{Adversary model}
\begin{itemize}
\item {\bf Free-edge}: in round $r$, edge $(u,v)$ is a free-edge if at
  the start of round $r$, $u$ has the token $v$ broadcasts in round
  $r$ and $v$ has the token $u$ broadcasts in round $r$.
\item The adversary constructs $G_r$ as follows:
  \begin{itemize}
  \item Add all free edges to $G_r$. Let $C_1, C_2, \dots, C_l$ be the
    connected components thus formed.
  \item Select an arbitrary node in each component and connect them in
    a line.
  \end{itemize}
\end{itemize}
\begin{figure}
  \includegraphics[width=.8\textwidth]{fig/adversary.jpg}
\end{figure}
\end{frame}

\begin{frame}{Critical structure}
\begin{itemize}
\item \alert{Half-empty}: We say a sequence of nodes
  $v_1,v_2,\dots,v_k$ is {\it half-empty} in round $r$ with respect to
  a sequence of tokens $t_1,t_2,\dots,t_k$ if for all $1\le i,j\le k,
  i\neq j$, either $v_i$ is missing $t_j$ or $v_j$ is missing $t_i$,
  at the beginning of round $r$.
\item Refer $\left( \langle v_i\rangle, \langle t_i \rangle \right)$
  as a \alert{half-empty configuration of size $k$}.
\item Draw a connection between {\it useful token exchanges} and this
  critical structure.
\end{itemize}
\begin{lemma}
If {\bf $m$ useful token exchanges} occur in round $r$, then there
exists a {\bf half-empty configuration of size $m/2 + 1$}.
\end{lemma}
\end{frame}

\begin{frame}
\begin{lemma}
If there exists a half-empty configuration of size $k$ at round $r$, then there exists a half-empty configuration of size $k$ at every round {\bf $r'\le r$}.
\end{lemma}
\begin{itemize}
\item If the structure exists now, it exists before.
\item If we can identify a token distribution in which all half-empty
  configurations are small, we can guarantee small progress in each
  round.
%\item There are many token distributions with this property.
\end{itemize}
\end{frame}

\begin{frame}
\begin{theorem}
From an initial token distribution in which each node has each token
independently with probability $3/4$, lower bound $\Omega(kn/\log n)$
holds.
\end{theorem}
\begin{itemize}
\item If the number of tokens $k$ is less than $100\log n$, then
  $\Omega(kn/\log n)$ lower bound is trivially true.
\item Focus on the case where $k\ge 100\log n$.
\item Want to show no big size half-empty configurations.
\end{itemize}
\end{frame}

\begin{frame}
\begin{itemize}
\item Let $E_l$ denote the event that there exists a half-empty
  configuration of size $l$ at the start of the first round.
\end{itemize}
\[ \prob{E_l} \le {n \choose l}\cdot \frac{k!}{(k-l)!}\cdot \rb{\frac{1}{2}}^{l \choose 2} \]
\begin{itemize}
\item ${n \choose l}$ is the number of ways of choosing the $l$ nodes
  that form the half-empty configuration.
\item $k!/(k-l)!$ is the number of ways of assigning $l$ distinct
  tokens.
\item $(1/2)^{{l \choose 2}}$ is the upper bound on the probability
  for each pair $i \neq j$ that either $v_i$ is missing $t_j$ or $v_j$
  is missing $t_i$, since there are ${l \choose 2}$ pairs.
\end{itemize}
\end{frame}

\begin{frame}
\[ \prob{E_l} \le {n \choose l}\cdot \frac{k!}{(k-l)!}\cdot \rb{\frac{1}{2}}^{l \choose 2} \le n^l \cdot k^l \frac{1}{2^{l(l-1)/2}} \le \frac{2^{2l\log n}}{2^{l(l-1)/2}} \]
\begin{itemize}
\item For $l = 5 \log n$, $\prob{E_l} \le 1/n^2$.
\item The largest half-empty configuration is of size $O(\log n)$.
\item The number of useful token transmissions is $O(\log n)$ in each round.
\item By Chernoff bound, the number of tokens missing at the beginning
  is $\Omega(kn)$.
\item Thus, the $\Omega(kn/\log n)$ lower bound holds.
\end{itemize}
\end{frame}

\begin{frame}
\begin{theorem}
For an initial token distribution in which 1) each token starts at
exactly one node, and 2) no node has more than one token, lower bound
$\Omega(kn/\log n)$ holds.
\end{theorem}

\begin{theorem}
For an initial token distribution in which each token starts at
exactly one node, lower bound $\Omega(kn/\log n)$ holds.
\end{theorem}
\end{frame}

\begin{frame}
\begin{figure}
  \includegraphics[width=\textwidth]{fig/td-theorem-proof.jpg}
\end{figure}
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% To sum up
\section{Conclusion}

\begin{frame}
  \tableofcontents[currentsection]
  % You might wish to add the option [pausesections]
\end{frame}

%\subsection{Summarize positive diffusion}
\begin{frame}
\begin{itemize}
\item Resource discovery through gossip:
  \begin{itemize}
  \item Propose triangulation and two-hop walk processes
  \item Give $O(n\log^2 n)$ upper bound and $\Omega(n\log n)$ lower
    bound in undirected graphs.
  \item Give $O(n^2\log n)$ upper bound and matching lower bound in
    directed graphs.
  \item Future research: 1) tight bound, 2) consider processes with
    edge deletion.
  \end{itemize}
\item Diffusion under adversarial dynamics:
  \begin{itemize}
  \item Show an $\Omega(kn/\log n)$ lower bound for token-forwarding
    algorithms under a strong adversary.
  \item Give an $O(n\sqrt{k\log n})$ algorithm and a bicriteria
    $(O(n^\epsilon),\log n)$ approximation algorithm under offline
    adversary model.
  \item Future research: 1) token-forwarding algorithms against
    oblivious adversary, 2) best possible time achievable for gossip
    with small token sizes.
  \end{itemize}
\end{itemize}
\end{frame}


\begin{frame}
\begin{itemize}
\item Design good intervention strategies to control negative diffusion.
\item Design centralized algorithms to minimize the cost of negative
  diffusion.
  \begin{itemize}
  \item Give an $O(\log n)$ approximation algorithm.
  \end{itemize}
\item Use game theory to analyze the setting where nodes make their
  own decision to secure themselves or not.
  \begin{itemize}
  \item Show the existence or non-existence of Nash equilibrium.
  \item Show hardness of finding Nash equilibrium.
  \item Bound price of anarchy.
  \end{itemize}
\item Results published in ICDCS 2010. Joint work with V.S. Kumar,
  Rajaraman, and Sundaram.
\end{itemize}
\end{frame}

\begin{frame}
\begin{itemize}
\item Study negative diffusion in the presence of risk behavior changes.
  \begin{itemize}
  \item Imperfect interventions.
  \item Nodes exhibit risk behavior changes due to perceived secure feelings.
  \end{itemize}
\item Fewer interventions can be more effective.
  \begin{itemize}
  \item Proofs for $G(n,p)$ graphs and locally-finite infinite graphs.
  \item Simulations on power-law graphs and real world data sets.
  \end{itemize}
\item Targeted interventions can be worse than random interventions.
  \begin{itemize}
  \item Simulations on power-law graphs and real world data sets.
  \end{itemize}
\item Results under submission. Joint work with V.S. Kumar, Rajaraman,
  and Sundaram.
\end{itemize}
\end{frame}

\end{document}

