\documentclass[11pt, draft]{article}
\usepackage{amsmath, amsthm, amssymb}
\usepackage[english]{babel}
\usepackage{url}
\usepackage{proof}
\usepackage{multirow}

\newtheorem*{subslem}{Substitution Lemma}
\newtheorem*{equivthm}{Equivalence Theorem}

\newcommand{\lc}{$\lambda$-Calculus}
\newcommand{\lam}{$\lambda$}

\author{Ali Assaf, Abbie Desrosiers, Alexandre Tomberg}
\title{Implementing a Dependently Typed \lc}

\begin{document}
\maketitle



\begin{abstract}
We explore three representations of \lc: FOAS, De Bruijn, and HOAS, and their implementation in SML, Twelf and Python. We also cover simple, polymorphic and dependent type systems. We discuss the advantages and disadvantages of each approach and provide a comparison of the languages used. Finally, we prove the equivalence between FOAS and De Bruijn.

All the code produced for this project can be found at the project webpage \cite{web}.
\end{abstract}

\tableofcontents



\section{Introduction}
\label{sec:Introduction}
\lc\mbox{} is a theoretical language that expresses and manipulates functions. It is expressive and powerful. In fact, it is considered Turing complete, meaning that it can express \emph{any} computable function. It is the foundation of functional programming languages and constructive logic~\cite{Selinger}.

Implementing \lc\mbox{} is a challenging problem. Our project focused on exploring various representations of \lc\mbox{} and methods that can be employed to implement them. We also looked at typed \lc\mbox{} and its extension to polymorphic and dependent types. Finally, we attempted to prove the equivalence between two such representations of \lc.



\section{Untyped \lc}
\label{sec:Untyped}

\subsection{The \lc}
\label{sec:lamcal}
\lc\mbox{} is a very simple language. It can be summarized in a few lines.
\begin{equation} \label{eq:LamTerms}
\mbox{Terms: } M ::= x\; |\; \lambda x . M\; |\; M N
\end{equation}
Expressions can be evaluated using the following rule
\[
	(\lambda x . M) N \Rightarrow [N / x] M
\]
where $[N / x]$ is a capture-avoiding substitution over the free occurrences of $x$ in $M$.

Implementing \lc\mbox{} is not as trivial as summarizing it. In our project, we have explored three implementation strategies, namely \emph{First Order Abstract Syntax} (FOAS), \emph{De Bruijn Index} and \emph{Higher Order Abstract Syntax} (HOAS). We shall discuss them one by one. Later, in Section \ref{sec:EquivProof}, we prove the equivalence between FOAS and De Bruijn Index.

\subsection{First Order Abstract Syntax}
FOAS is the most natural implementation of \lc. In fact, it mimics the way we think about \lam-abstractions, by assigning a string name to every variable.
\[
 \mbox{Terms: } M ::= \operatorname{var} ``x" \; |\; \operatorname{lam} ``x" . M\; |\; \operatorname{app} M N 
\]
However, this approach has a number of disadvantages, most important of which are
\begin{itemize}
	\item Absence of immediate $\alpha$-equivalence: two identical functions using different variable names are \emph{a priori} different.
	\item Free variable capture: this problem arises during substitution. If done na\"ively, free variables may become bound if they are not renamed properly.
\end{itemize} 
Because of these problems, we decided not to focus on this implementation strategy, and our only attempt is discussed in Section \ref{sec:Python}.

\subsection{De Bruijn Index} Another implementation strategy, due to De Bruijn, consists of labeling variables inside terms by the distance to their introducing binder.
\begin{equation} \label{eq:deBruijnTerms}
  \mbox{Terms: } M ::= n \; |\; \operatorname{lam} M\; |\; \operatorname{app} M N 
\end{equation}
where $n$ is a natural number. For example, the function $\lambda x . (\lambda y. x\: y)$ becomes $\lambda . (\lambda . 2\; 1)$. This way, we get a \emph{unique} representation for each term, and hence, $\alpha$-renaming is not a concern.

However, this notation is not intuitive and becomes cumbersome to manipulate. In particular, it requires shifting indices during substitution to avoid variable capture. Indeed, if we substitute a term for a variable inside a lambda body, we need to update the distances of free variables inside that term. For example, in our SML implementation, at substitution, we call the shift function:
\begin{verbatim}
fun shift (var x) l b = if x > b then var (x + l) else var x
  | shift (lam m) l b = lam (shift m l (b + 1))
  | shift (app (m, n)) l b = app(shift m l b, shift n l b)
\end{verbatim}

\subsection{Higher Order Abstract Syntax (HOAS)}
The idea behind this strategy is to use the features of the meta-language. Binders and variables in our language are represented by functions and variables in the host language.
\[
 \mbox{Terms: } M ::= \operatorname{lam} f\; |\; \operatorname{app} M N \mbox{,}
\]
where $f$ is a function from terms to terms. Since substitution is already implemented in the host language, we can use it directly
\[
 (\operatorname{lam } f) N \Rightarrow f N
\]
without worrying about $\alpha$-renaming which is done automatically.

This approach leads to very short and efficient implementations. We have tried this approach in both Twelf and SML, and the code can be found in Figure \ref{fig:HOAS}.

\begin{figure}
 \centering
	\begin{verbatim}
datatype exp = lam of (exp -> exp) | app of exp * exp;

fun eval (lam f) = (lam f)
  | eval (app (m, n)) =
      case eval m of
        lam f => eval (f n)
      | m' => app (m', n) 
	\end{verbatim}
\begin{center}
 \rule{50pt}{.5pt} \rule[-.5pt]{2pt}{2pt} \rule{50pt}{.5pt}
\end{center}
	\begin{verbatim}
exp : type.
lam : (exp -> exp) -> exp.
app : exp -> exp -> exp.

eval : exp -> exp -> type.
eval/lam : eval (lam F) (lam F).
eval/app : eval (app M N) M'
	<- eval M (lam F)
	<- eval (F N) M'.
	\end{verbatim}
\caption{Our implementations of HOAS in SML (top) and Twelf (bottom)}
\label{fig:HOAS}
\end{figure}


\section{HOAS in Python} \label{sec:Python}

\subsection{The need for a new paradigm}
In this section we present a novel approach to working with Higher Order Abstract Syntax. Present mainstream languages do not allow us to work with HOAS easily~\cite{Washburn}. SML, for example, does not support pattern matching on functions, and prevents us from looking inside them. Thus, we cannot even verify if our implementation is correct as we cannot look inside our terms! This can be solved by evaluating the function on a dummy value, but the static type system in SML makes it very hard. Another problem is that, in some situations, it can be difficult to generate terms. Of course, it is very easy to write them in the code, but what if we wanted a parser, or a translator from first-order to higher-order abstract syntax?

To solve these problems, we chose to explore a different paradigm. We propose the Python programming language to work around these difficulties, and we shall now discuss this choice.


\subsection{Dynamic types}
Python is dynamically typed, so that we can easily look inside functions, since we can pass them anything. We can thus display our functions by passing them a dummy object which, in our case, is just a string containing the variable name! Python makes it really easy. We have implemented untyped lambda calculus, and provided methods for displaying terms.
\begin{verbatim}
>>> I = Lam(lambda x: x)
>>> I
Lam(lambda x: x) 
\end{verbatim}

We have also implemented a pretty-printer which displays the terms in the usual way we write them, with the minimal number of parentheses, and with different strategies for choosing variable names: we either number them,
\begin{verbatim}
>>> print I
lam x1. x1
\end{verbatim}
or select them at random from a predefined alphabet,
\begin{verbatim}
>>> print I
lam u. u
\end{verbatim}
or even choose the original internal variable name (Python supports introspection)!
\begin{verbatim}
>>> print I
lam x. x
\end{verbatim}

\subsection{Code generation}
Another interesting feature of Python is its flexible meta-programming and code generation. At runtime, we can generate a string for a piece of code and evaluate it to get back an object. We can use this to generate terms in HOAS, which could be very useful in the context of a parser, or a translator.

We can even use this for substitution in first order abstract syntax, where the abstract syntax is a usual tree with explicit variables. However, for substitution, we convert the tree into a Python function, and call it on the argument. We achieve this by generating a string, where we transform variables into their names and our lambda terms into Python expressions.
\begin{verbatim}
class Lam(Term):
    ...
    def __call__(self, arg):
        code = "lambda %s: %s" % (self.name, self.body.hoas())
        f = eval(code)
        return f(arg)
\end{verbatim}
When we evaluate the string, we get back a function which captures the variables, so we can use it to substitute the argument. We also need to make sure that we retrieve the lambda terms and the variables inside the body after the call. By generating the code appropriately, we do this automatically at the function call.

\begin{figure}
	\centering
		\begin{verbatim}
class Lam(Term):
    ...
    def __call__(self, arg):
        code = "lambda %s: %s" % (self.name, self.body.hoas())
        f = eval(code)
        return f(arg)
    def evaluate(self):
        return Lam(self.name, self.body)
    def hoas(self):
        return "(lambda %s: Lam('%s', %s))(Var('%s'))" %(
            self.name, self.name, self.body.hoas(), self.name)

class Var(Term):
    ...
    def evaluate(self):
        return Var(self.name)
    def hoas(self):
        return self.name

class App(Term):
    ...
    def evaluate(self):
        return (self.left.evaluate())(self.right).evaluate()
    def hoas(self):
        return "App(%s, %s)" % (self.left.hoas(), self.right.hoas())
  		\end{verbatim}
	\caption{Using code generation to implement substitution in FOAS.}
	\label{fig:PythonFOAS}
\end{figure}



\section{Type Systems} \label{sec:Types}

\subsection{The problem with \lc}
Consider the function $\omega = \lambda x . (x\: x)$. Suppose we wish to apply it to itself:
\[
 	\omega\: \omega = (\lambda x . (x\: x)) \: \omega \Rightarrow [\omega / x](\lambda x . (x\: x)) \Rightarrow \omega\: \omega  \Rightarrow \ldots
\]
This evaluation never terminates. This is true no matter the evaluation strategy we use (call by name, by value). In that case, we say that $( \omega\: \omega )$ has no normal form. Therefore, we need a way to check that a given term has a normal form without performing the evaluation. We solve this problem by introducing types.

\subsection{Types}
Just like functions that one studies in mathematics, we restrict the \lam terms to a fixed domain $A$ and codomain $B$. We call these \emph{types}. Our types consist of base types, and arrow types, which we associate to \lam-abstractions.
\[
 \mbox{Types: } \tau ::= \alpha\; |\; \tau \rightarrow \sigma
\]
where $\alpha$ is a base type. The typing rules are listed in Figure \ref{fig:TypingRules}.
\begin{figure}
\[\begin{array}{lc}
	(var) & \infer{\Gamma, x \in A \vdash x \in A}{} \\
	(app) & \infer{\Gamma \vdash M N \in B}{
		\Gamma \vdash M \in A \rightarrow B 
		&
		\Gamma \vdash N \in A } \\
	(lam) & \infer{\Gamma \vdash \lambda x:A. M \in A \rightarrow B}{
		\Gamma, x \in A  \vdash M \in B}\\
\end{array}\]
\caption{Typing rules in \lc}
\label{fig:TypingRules}
\end{figure}


Let us now check what happens to the function $\omega$. $\omega = \lambda x . (x\: x)$ is a lambda abstraction so $\omega$ must belong to some type $\alpha \rightarrow \beta$. This implies $x$ has type $\alpha$. But in the body we apply $x$ to itself and return the result, so $x$ must also be of type $\alpha \rightarrow \beta$. Since $\alpha \neq \alpha \rightarrow \beta$, we see that $\omega$ cannot have a valid type. Indeed, it can be shown that simply typed lambda calculus is \emph{strongly normalizing}, meaning that evaluation always terminates, no matter which strategy we use.

\subsection{Implementing simply typed \lc}
To implement types we need to add type definitions.
\begin{verbatim}
t: type.
base: t.
arrow: t -> t -> t.
infix right 10 arrow.
\end{verbatim}
We also need to implement the typing rules. This can easily be done in Twelf, as it is very well suited for such formal systems. There are two approaches to this.
\paragraph{Curry-style semantics}
The first is to write a type checker, which simply verifies that a term has a given type according to the typing rules.
\begin{verbatim}
exp: type.
lam: t -> (exp -> exp) -> exp.
app: exp -> exp -> exp.

check: exp -> t -> type.
check/lam: check (lam A F) (A arrow B)
    <- {x:exp} (check x A -> check (F x) B).
check/app: check (app M N) B
    <- check M (A arrow B)
    <- check N A.
\end{verbatim}
Note that we now specify the type of the argument in the lambda term. We can now type check our expressions.
\begin{verbatim}
query 1 * check (lam base ([x] x)) (base arrow base).
---------- Solution 1 ----------
Empty Substitution.
\end{verbatim}
This last line indicates that the query is trivially true (i.e. without any substitution).

But Twelf is even more powerful: we can ask it to do type inference. This is possible by querying the type checker against an undefined type.
\begin{verbatim}
query 0 * check (lam base ([x] x)) T.
---------- Solution 1 ----------
T = base arrow base.
\end{verbatim}
The type checker refuses the $\omega$ term we saw earlier, and we can verify this by querying the Twelf server.
\begin{verbatim}
query 0 * check omega T.
\end{verbatim}

This approach, where one first constructs a term, and then type checks it is named after Haskell Curry. Note that it allows ill-typed terms to be created, although they would not type check.

\paragraph{Church-style semantics}
A different approach, named after Alonzo Church, is to directly annotate expressions with their types.
\begin{verbatim}
exp: t -> type.
lam: (exp A -> exp B) -> exp (A arrow B).
app: exp (A arrow B) -> exp A -> exp B.
\end{verbatim}

Note that we do not need a type checker as the typing rules are built into the expression definitions. Moreover, ill-typed terms cannot even be created! As a consequence, during evaluation, the type information is simply ignored. This also has another advantage: we don't need to prove that types are preserved during evaluation. We discuss this further in Section~\ref{sec:dependent}.


\subsection{Polymorphism}

\paragraph{Limitations of simple types}
Adding types to \lc\mbox{} introduces a number of limitations. Since evaluation always terminates, the language is no longer Turing complete. In particular, it does not support recursion. Another problem, arising from type annotations, is that terms can only be applied to one specific type. For example, the identity function from $\alpha$ to $\alpha$ can only be applied to elements of type $\alpha$, even though the same code should work for elements of any other type.

This is true for both Church and Curry-style types, because in the latter, the type of the full term is completely determined by the type of its argument. In that case, we could choose to omit this annotation of argument types, but we would still need to type check our expressions against a specific type, or implement type inference.

\paragraph{System F}
This last problem has led to the extension of the simple type system to polymorphic types. The idea is very simple: we allow abstractions over types.
\[
\begin{array}{l}
 \mbox{Terms M } ::= \ldots \; |\; \Lambda \alpha. M\; |\; M [\tau] \\
 \mbox{Types $\tau$ } ::= \ldots \; |\; \forall \alpha. \tau
\end{array}
\]
where $\Lambda$ is the quantifier used for type abstraction. For instance, the identity function is written as $\Lambda \alpha. \lambda x^\alpha. x$, and has type $\forall \alpha. \alpha \rightarrow \alpha$. Polymorphic terms can be instantiated by applying them to types. We can use the identity function $I$ on an element $x$ of type $\alpha$ by writing $I [\alpha] x$. The resulting type system is called \emph{System F}.

We have implemented polymorphic types in Twelf (Figure \ref{fig:SystemF}). As usual, the implementation is straightforward, as we only need to extend our definitions and transcribe the new typing rules.

\begin{figure}
	\centering
		\begin{verbatim}
t : type.
atom : name -> t.
arrow : t -> t -> t.
forall : (t -> t) -> t.
infix right 10 arrow.

exp : type.
lam : t -> (exp -> exp) -> exp.
app : exp -> exp -> exp.
LAM : (t -> exp) -> exp.
APP : exp -> t -> exp.

check : exp -> t -> type.
check/lam : check (lam A M) (A arrow B)
    <- {x:exp} (check x A -> check (M x) B).
check/app : check (app M N) B
    <- check M (A arrow B)
    <- check N A.
check/LAM : check (LAM F) (forall G)
    <- {a:t} (check (F a) (G a)).
check/APP : check (APP M T) (G T)
    <- check M (forall G).
  		\end{verbatim}
\caption{System F implementation in Twelf}
\label{fig:SystemF}
\end{figure}

\subsection{Dependent Types} \label{sec:dependent}
Dependent types provide another way to extend the type system. The idea is similar to polymorphism, but this time we allow types to depend on values. This can be very useful to encode our types with additional information. For example, we can have a family of types representing vectors. Each vector type is indexed by its length.
\[
\forall \alpha:*. \forall n:\operatorname{Nat}. \operatorname{Vec}( \alpha, n)
\]
This would allow us, for instance, to write an append function which takes a vector of length $n$ and returns a vector of length $n+1$. Since we are carrying the length of the vector around, we would not need to prove that the lengths of the vectors behave as expected.

We already used this idea in the Church-style type implementation, where we encoded type information in expressions. As a consequence, we did not need to prove that terms evaluate to terms of the same type. Unfortunately, we did not have the necessary time (or knowledge) to complete an implementation of dependent types.



\section{Equivalence of Representations} \label{sec:EquivProof}

We have seen how we can use different methods to represent \lc. However it is important to make sure that these are all equivalent representations. In this section, we will attempt to prove the equivalence between FOAS and De Bruijn. We will assume that all terms are closed and that the evaluation does not go inside \lam-bodies (i.e. we consider \lam abstractions to be values).

\paragraph{Equivalence of Languages} Two languages are equivalent if every term in the first language can be represented by an equivalent term in the second language and vice versa. Equivalence in terms can be defined using a few simple translation rules. However, not only do the terms have to translate to each other, the translation has to be invariant under reductions. That is, if term $A$ in the first language reduces to $A'$ and term $B$ in the second language reduces to $B'$ then, if $A$ and $B$ are equivalent, $A'$ and $B'$ should also be equivalent (assuming $A'$ and $B'$ are normalized terms and the reduction order is clearly defined). If two languages are equivalent, then neither is more computationally powerful than the other.

\paragraph{Translation} Recall that terms in \lc\mbox{} are defined as in \eqref{eq:LamTerms}, and De Bruijn indices are made up of terms as in \eqref{eq:deBruijnTerms}. Equivalence between a De Bruijn term $M$ and a \lam-term $M'$ is written $M \leftrightarrow M'$ and defined with the rules listed in Figure~\ref{fig:TransRules}.

Note that, in the $\leftrightarrow$lam rule, $x$ must be a fresh variable. That is, it must not occur anywhere in the rest of the context $\Gamma$. This prevents variable capture and can always be done through $\alpha$-renaming. The $\operatorname{inc}$ function is a recursive function on contexts defined by
\[ 
\begin{array}{l}
 \operatorname{inc}(\cdot) = \cdot \\
 \operatorname{inc}(\{x = n, \Gamma\}) = \{ x = n + 1, \operatorname{inc}(\Gamma) \}
\end{array}
\]
These rules cover all closed terms in both languages, so any term in one language can be translated into a term in the other language. But are the languages equivalent?

\begin{figure}
 \[\begin{array}{lc}
 	(\leftrightarrow var) & \infer{\Gamma_1,x=n,\Gamma_2 \vdash n \leftrightarrow x}{}\\
	(\leftrightarrow app) & \infer{\Gamma \vdash M N \leftrightarrow M' N'}
	{ \Gamma \vdash M \leftrightarrow M' & \Gamma \vdash N \leftrightarrow N'}\\
	(\leftrightarrow lam) & \infer{\Gamma \vdash \lambda . M \leftrightarrow \lambda x . M'}
	{ inc(\Gamma),x = 1 \vdash M \leftrightarrow M'}\\
\end{array} \]
\caption{Translation rules between De Bruijn indices and \lc}
\label{fig:TransRules}
\end{figure}

\paragraph{Equivalence of substitutions} Substitution in the De Bruijn representation is similar to that of the standard \lc. The main difference is incrementing the value of the index being substituted to account for scoping. Substitution can be defined as follows:
\[
 \begin{array}{l}
[N/n] n \rightarrow N \\ \mbox{}
[N/n] m \rightarrow m \quad (\mbox{if }m \neq n) \\ \mbox{}
[N/n] \lambda M \rightarrow \lambda([N/(n+1)] M) \\ \mbox{}
[N/n] M_1 M_2 \rightarrow [N/n]M_1 [N/n] M_2 \\
 \end{array}
\]
Then the evaluation of applications of \lam-terms can be defined as:
\[
  (\lambda M_1)M_2 \rightarrow [M_2/1]M_1 
\]


\begin{subslem}
 If $\mathcal{D} : \Gamma_1,x=n,\Gamma_2 \vdash M_1 \leftrightarrow N_1$, and $\mathcal{E} : \Gamma \vdash M_2 \leftrightarrow N_2$, then $\mathcal{F} :\Gamma_1,x=n,\Gamma_2, \Gamma \vdash [M_2/n]M_1 \leftrightarrow [N_2/x]N_1$
\end{subslem}
\begin{proof}
 By induction on $\mathcal{D}$.
\paragraph{Case} $\mathcal{D} : \Gamma_1,x=n,\Gamma_2 \vdash n \leftrightarrow x$
\[
 \begin{array}{ll}
  [M_2 / n] n \Rightarrow M_2 & \mbox{by DeBruijn substitution}  \\ \mbox{}
  [N_2 / x] x \Rightarrow N_2 & \mbox{by \lam-substitution}\\ \mbox{}
  \Gamma \vdash M_2 \leftrightarrow N_2 & \mbox{by $\mathcal{E}$}\\ \mbox{}
  \Gamma_1,x=n,\Gamma_2 \vdash M_2 \leftrightarrow N_2 & \mbox{by weakening}\\ \mbox{}
 \end{array}
\]

\paragraph{Case} $\mathcal{D} : \Gamma_1,x=n,\Gamma_2, y = m, \Gamma_3 \vdash m \leftrightarrow y$
\[
 \begin{array}{ll}
  [M_2 / n] m \Rightarrow m & \mbox{by DeBruijn substitution}  \\ \mbox{}
  [N_2 / x] y \Rightarrow y & \mbox{by \lam-substitution}\\ \mbox{}
  \Gamma_1,x=n,\Gamma_2, y = m, \Gamma_3 \vdash m \leftrightarrow y & \mbox{by $\mathcal{D}$}\\ \mbox{}
  \Gamma_1,x=n,\Gamma_2, y = m, \Gamma_3, \Gamma \vdash m \leftrightarrow y & \mbox{by weakening}\\ \mbox{}
 \end{array}
\]

\paragraph{Case} $\mathcal{D} : \Gamma_1,x=n,\Gamma_2 \vdash M_1 M_1' \leftrightarrow N_1 N_1'$
\[
 \begin{array}{ll}
  \begin{array}{l}
  \Gamma_1,x=n,\Gamma_2 \vdash M_1 \leftrightarrow N_1 \\ 
  \Gamma_1,x=n,\Gamma_2 \vdash M_1' \leftrightarrow N_1' 
  \end{array} & \mbox{by inversion on $\leftrightarrow$app}\\ \mbox{}

  \begin{array}{l}
  \Gamma_1,x=n,\Gamma_2, \Gamma \vdash [M_2/n]M_1 \leftrightarrow [N_2/x]N_1 \\ 
  \Gamma_1,x=n,\Gamma_2, \Gamma \vdash [M_2/n]M_1' \leftrightarrow [N_2/x]N_1' 
  \end{array} & \mbox{by induction hypothesis}\\ \mbox{}

  [M_2 / n] M_1 M_1' \Rightarrow [M_2 / n] M_1 [M_2 / n] M_1' & \mbox{by DeBruijn substitution}  \\ \mbox{}
  [N_2 / x] N_1 N_1' \Rightarrow [N_2 / x] N_1 [N_2 / x] N_1' & \mbox{by \lam-substitution}\\ \mbox{}

  \begin{array}{l}
  \Gamma_1,x=n,\Gamma_2, \Gamma \vdash \\ 
  \quad [M_2 / n] M_1 [M_2 / n] M_1' \leftrightarrow [N_2 / x] N_1 [N_2 / x] N_1'
  \end{array} & \mbox{by $\leftrightarrow$app}\\ \mbox{}
 \end{array}
\]

\paragraph{Case} $\mathcal{D} : \Gamma_1,x=n,\Gamma_2 \vdash \lambda M_1 \leftrightarrow \lambda y . N_1$
\[
 \begin{array}{ll}
  \operatorname{inc}(\Gamma_1,x=n,\Gamma_2), y=1 \vdash M_1 \leftrightarrow N_1  & \mbox{by inversion on $\leftrightarrow$lam}\\ 
  \operatorname{inc}(\Gamma_1), x = n+1, \operatorname{inc}(\Gamma_2), y=1 \vdash M_1 \leftrightarrow N_1  & \mbox{by inc}\\
  
  \begin{array}{l}
  \operatorname{inc}(\Gamma_1), x = n+1, \operatorname{inc}(\Gamma_2), y=1, \Gamma \vdash \\ 
  \quad [M_2 / (n+1)] M_1 \leftrightarrow [N_2 / x] N_1
  \end{array} & \mbox{by induction hypothesis}\\ \mbox{}

  \begin{array}{l}
  \operatorname{inc}(\Gamma_1,x=n,\Gamma_2), y=1, \Gamma \vdash \\ 
  \quad [M_2 / (n+1)] M_1 \leftrightarrow [N_2 / x] N_1
  \end{array} & \mbox{by inc}\\ \mbox{}

  \begin{array}{l}
  \Gamma_1,x=n,\Gamma_2, \Gamma \vdash \\ 
  \quad \lambda [M_2 / (n+1)] M_1 \leftrightarrow \lambda y . [N_2 / x] N_1
  \end{array} & \mbox{by $\leftrightarrow$lam}\\ \mbox{}
 \end{array}
\]
\end{proof}

\begin{equivthm}
 Let $A$ be a De Bruijn term and $B$ be a lambda term such that $A$ and $B$ are equivalent. Then if $A$ reduces to $A'$ and $B$ reduces to $B'$ then $A'$ and $B'$ are equivalent.
\end{equivthm}

\begin{proof}
 By induction on the reduction rules of the lambda calculus.
\paragraph{Case} $A = n,\: B = x$ \\
    Both $A$ and $B$ are values and therefore cannot be reduced any further.
\paragraph{Case} $A = \lambda M$, and $B = \lambda x.M $\\
    Both $A$ and $B$ are values and therefore cannot be reduced any further.
\paragraph{Case} $A = M_1 M_2$, $B = N_1 N_2$ and $B'$ obtained using $app$ rule:
\begin{equation}\label{eq:lamapp}
 \infer{N_1 N_2 \rightarrow N_1' N_2}{N_1 \rightarrow N_1'}
\end{equation}
And so, $B' = N_1' N_2$. Similarly, by $app$ for De Bruijn, we get $A' = M_1' M_2$
\[
 \begin{array}{ll}
  M_1 \leftrightarrow N_1 \mbox{ and } M_2 \leftrightarrow N_2 & \mbox{by inversion on $\leftrightarrow$app}\\
  M_1' \leftrightarrow N_1' & \mbox{by induction hypothesis} \\
 \end{array}
\]
And finally, by $\leftrightarrow$app we get
\[
 \infer{M_1' M_2 \leftrightarrow N_1' N_2}{M_1' \leftrightarrow N_1' & M_2 \leftrightarrow N_2} 
\]
\paragraph{Case} $A = (\lambda M_1) M_2$, $B = (\lambda x . N_1) N_2$ and $B'$ obtained using $app$ rule \eqref{eq:lamapp}.
\[
 (\lambda x . N_1) N_2 \overset{app}{\rightarrow} [N_2/x]N_1 \Longrightarrow B' = [N_2/x]N_1
\]
Similarly, $A' = [M_2/1]M_1$, and so,
\[
 \begin{array}{ll}
  \lambda M_1 \leftrightarrow \lambda x . N_1 \mbox{ and } M_2 \leftrightarrow N_2 & \mbox{by inversion on $\leftrightarrow$app}\\
  x = 1 \vdash M_1 \leftrightarrow N_1 & \mbox{by inversion on $\leftrightarrow$lam} \\ \mbox{}
  x = 1 \vdash [M_2/1]M_1 \leftrightarrow [N_2/x]N_1 & \mbox{by Substitution Lemma}\\
 \end{array}
\]

\end{proof}



\section{Discussion}
We have presented in sections \ref{sec:Untyped}, \ref{sec:Python} and \ref{sec:Types} different approaches to implement \lc, and we mentioned that certain languages are better adapted for some of them. We will now discuss this further.

\paragraph{Representations} We found that HOAS is generally much easier to use in most settings. It is very straightforward and easy to code. However, it can be problematic in some languages. In contrast, De Bruijn approach is harder to code, but provides a unique representation for every term and allows an easier evaluation inside \lam-terms. On the other hand, FOAS is the hardest to implement, but is still useful for interacting with the user, at least as an intermediary step (see Python implementation).

\paragraph{Languages} Twelf shines in implementing types. Being a logical framework, it allows us to write typing rules very easily. In addition, its built-in inference engine allows us to do type inference automatically. Finally, it supports dependent types, which we used for our Church-style implementation of types. SML, although usually inferior to Twelf in ease of use, proved to be better for De Bruijn strategy. Indeed, it has a built-in integer system, and allows us to write functions more easily. 

However, as described before, functional languages are not well suited for interacting with HOAS. Twelf also lacks basic features like good I/O and string manipulation, and as such is not ideal for realistic systems. Python solves these issues gracefully. It also provides another approach to \lc\mbox{} through the object oriented paradigm. However, the resulting implementation is less robust, and it places a greater responsibility on the programmer, unlike SML where we can type check our program, or Twelf, where we can even prove properties about our system.



\section{Conclusion}
We have covered and compared different methods that can be used to implement untyped and typed \lc, focusing on FOAS, De Bruijn indices, and HOAS. We have discussed how we can implement these in Twelf, SML and Python. We have also implemented types, and shown how to extend the typing system with polymorphic and dependent types. Although our original goal was to implement dependent types, we did not have enough time or theoretical background to complete it.

A lot can be done from here. We have yet to attempt the following implementations:
\begin{itemize}
 \item FOAS representation with explicit substitution.
 \item Types in SML and Python.
 \item Dependent types.
 \item Parser and interpreter.
\end{itemize}
We can also prove type preservation during evaluation, and implement some of the proofs in Twelf.

In \cite{Loh}, a tutorial on how to implement dependent types in Haskell is provided. It also describes an interesting approach which combines different representations (explicit variables, De Bruijn index, and HOAS).  An alternative approach to implement all the type systems is presented in \cite{Augustsson}. We have seen how HOAS is usually the simplest way to implement \lc, but that it is not commonly used because of the difficulties that can arise. A possible solution to this problem is discussed in \cite{Washburn}.



\begin{thebibliography}{9}
\bibitem{web} Project Webpage, \url{http://aaa-comp527project.googlecode.com}

\bibitem{Loh} A. Loh, C. McBride, W. Swierstra, \emph{A tutorial implementation of a dependently typed lambda calculus}

\bibitem{Augustsson} Lennart Augustsson, \emph{Simpler, Easier!}, \url{http://augustss.blogspot.com/2007/10/simpler-easier-in-recent-paper-simply.html}

\bibitem{Selinger} Peter Selinger, \emph{Lecture Notes on the Lambda Calculus}, \url{http://www.mathstat.dal.ca/~selinger/papers/lambdanotes.pdf}

\bibitem{Girard} Jean-Yves Girard, Yves Lafont, Paul Taylor, \emph{Proofs and Types}, Cambridge University Press, Cambridge, 1989

\bibitem{Pierce} Benjamin C. Pierce, \emph{Advanced Topics in Types and Programming Languages}, MIT Press, Cambridge, 2005

\bibitem{Xi} Hongwei Xi, Frank Pfenning, \emph{Dependent Types in Practical Programming (Extended Abstract)}

\bibitem{Washburn} Geoffrey Washburn, Stephanie Weirich, \emph{Boxes Go Bananas: Encoding Higher-Order Abstract Syntax with
Parametric Polymorphism}, Proceedings of the eighth ACM SIGPLAN international conference on Functional programming, Uppsala, Sweden, p. 249-262, 2003

\end{thebibliography}
\end{document}
