\documentclass[12pt]{amsart}
\usepackage[utf8]{inputenc}
\usepackage{amsmath}
%\usepackage{fullpage}
\usepackage[all]{xy}
\title{Outline}
\date{}
\begin{document}
\maketitle

\section{Directed and undirected graphs}
A \emph{directed graph} $\vec{G}$ is a quadruple $(V,E,h,t)$ where $V$ and $E$ are disjoint sets and $h$ and $t$ are maps from the set $E$ to the set $V$. The set $V$ is the vertex set of the graph $\vec{G}$ and its elements are called vertices; the set $E$ is the edge set and its elements are called directed edges; the map $h$ (resp. $t$) is called the head map (resp. the tail map) and for any directed edge $e$, the vertex $h(e)$ (resp. $t(e)$) is called the head (resp. the tail) of the directed edge. A directed edge whose head and tail are the same vertex is called a self-loop.

An \emph{undirected graph} is a pair $G=(\vec{G},\textrm{opp})$  where $\vec{G}=(V,E,h,t)$ is a directed graph and $\textrm{opp}$ is an involution of the edge set $E$ such that the following diagram commutes:

\[\xymatrix{
& E \ar[dd]^{\textrm{opp}} \ar[rd]^{h} \ar[ld]_{t}\\
V && V\\
& E \ar[ru]_{t} \ar[lu]^{h}
}\]

Given a directed edge $e$, we call the directed edge $\textrm{opp}(e)$ its opposite directed edge. The above diagram hence means that the head and tail of the opposite of a directed edge are respectively the tail and head of the original directed edge. In the case of self-loops, there are two possibilities: either its opposite directed edge is itself, in which case we call the pair a half-loop; or its opposite directed edge is another self-loop, in which case we call the pair a whole-loop.

The opposite map induces an equivalence relation on the directed edges of the directed graph $\vec{G}$ and we call the quotient set the undirected edges of the undirected graph $G$ and denote that set by $E_G$. An orientation of an undirected graph $G$ is the choice of a representative directed edge for each undirected edge. An undirected graph is thus entirely described by its vertex set and a set of oriented edges (which includes information about the head and tail of each oriented edge and which self-loops are whole-loops or half-loops). Given an oriented edge $e$, we will generically describe its corresponding opposite edge by $e^{-1}$. In this paper, unless otherwise mentioned, all graphs are undirected graphs.

\section{The $\mathcal{G}_{n,B}$ model}
Let $B$ be a graph whose self-loops, if any, are all whole-loops, with vertex set $V_B = \{v_1,\ldots,v_s\}$ and oriented edge set $E_B = \{ e_1,\ldots,e_a \}$. We denote by $\mathcal{G}_{n,B}$ the following model for $n$-lifts of the graph $B$. We obtain an element $G$ of $\mathcal{G}_{n,B}$ for each choice of $a$ permutations of $n$ elements $\{ \pi_1,\ldots,\pi_a \}$ with the following vertex and oriented edge sets:
\begin{align*}
V_G &= V_B \times \{ 1,\ldots,n \}\\
E_G &= \left\{ \left( (\textrm{tail}(e_j),i), (\textrm{head}(e_j),\pi_j(i)) \right) \mid j=1,\ldots,a,\quad i=1,\ldots,n \right\}
\end{align*}
(Note: the head of the edge $\left( (\textrm{tail}(e_j),i), (\textrm{head}(e_j),\pi_j(i)) \right)$ is the vertex $(\textrm{head}(e_j),\pi_j(i))$ and its tail is the vertex $(\textrm{tail}(e_j),i)$).

We equip the set $\mathcal{G}_{n,B}$ with the uniform probability distribution on its $(n!)^a$ elements.

\section{Old and New eigenvalues}
We know that all the eigenvalues of the base graph $B$ are going to be eigenvalues of any $n$-lift $G$. Among all the eigenvalues of a lift $G$ we distinguish the old ones as being the ones coming from the base graph and refer to the other ones as the new eigenvalues. We denote by $\lambda_{\text{new}}(G)$ the maximal absolute value of a new eigenvalue of a lift $G$.

\section{Main Theorem}
The main theorem that we would like to prove is that for any $\varepsilon > 0$, there is a constant $c > 0$ such that a random element $G$ of the $\mathcal{G}_{n,B}$ model will satisfy $\lambda_{\text{new}}(G) \leq \rho + \varepsilon$ with probability at least $1-cn^{-\tau}$ where $\rho$ is the spectral radius of the universal cover of the base graph $B$ and where $\tau$ is a constant that only depends on the base graph.

\section{Walk sums}
A $(k,n)$-walk is a pair $(w,\vec{t})$ where $w = \sigma_1\ldots\sigma_k$ is a word of length $k$ in the alphabet $\Pi = \{ \pi_1, \pi_1^{-1},\ldots,\pi_a,\pi_a^{-1} \}$ which corresponds to a walk in the base graph $B$ (since each letter of the alphabet $\Pi$ corresponds to an oriented edge of $B$); and where $\vec{t} = (t_0,\ldots,t_k)$ is a $(k+1)$-tuple of integers between 1 and $n$. We denote by $\mathcal{E}(w,\vec{t})$ the event in $\mathcal{G}_{n,B}$ where the lift of the walk in $B$ corresponding to $w$ at the vertex $(\textrm{tail}(\sigma_1),t_0)$ has the trajectory described by $\vec{t}$, that is, has the trajectory
\[
(\textrm{tail}(\sigma_1),t_0) \rightarrow (\textrm{head}(\sigma_1),t_1) \rightarrow (\textrm{head}(\sigma_2),t_2) \rightarrow \ldots \rightarrow (\textrm{head}(\sigma_k),t_k)
\]
(Note: recall that since $w$ corresponds to a walk in $B$, it is already a given that $\textrm{head}(\sigma_i)=\textrm{tail}(\sigma_{i+1})$ for all $i=1,\ldots,k-1$). We denote by $P(w,\vec{t})$ the probability of that event.\footnote{Technically, the $\pi_j$ are random variables which describe the permutation associated to the $j^{\text{th}}$ edge of the base graph and $\pi_j^{-1}$ the inverse of that permutation.}

It is immediate that $P(w,\vec{t}) \neq 0$ if and only if the following two conditions are satisfied:
\begin{itemize}
\item[\textit{i)}] if $\sigma_i = \sigma_j$ then we have that $t_{i-1}=t_{j-1}$ if and only if $t_i=t_j$.
\item[\textit{ii)}] if $\sigma_i = \sigma_j^{-1}$ then we have that $t_{i-1}=t_j$ if and only if $t_i=t_{j-1}$.
\end{itemize}
We also have that
\[
P(w,\vec{t}) = \prod_{j=1}^a \frac{(n-a_j)!}{n!}
\]
where $a_j$ is the number of values of $\pi_j$ determined by $(w,\vec{t})$.

We can generalize the notions of walk sums and walk collections to this context; the definitions carry over essentially unchanged.

\section{Forms and Types}
For Forms and Types, the whole theory works as well, the only difference lies in what information to collect and how. We want to also keep track of which vertices of the base graph we are visiting.





\end{document}