%!TEX root = thesis.tex

In this chapter, we show how to improve fuzzy extractors by incorporating additional structure of the source distribution $W$.  We begin by defining fuzzy min-entropy which describes a noisy distribution's suitability for key derivation (\secref{sec:minimal conditions}).  We then show how to construct fuzzy extractors if a distribution is exactly known (\secref{sec:known distributions}).  However, we show that distributional uncertainty comes at a cost.  We show if a distribution $W$ is only known to come from a family of distributions $\mathcal{W}$, then it may be impossible to construct a fuzzy extractor (\secref{sec:family of dist}).

\section{Fuzzy Min-Entropy: a Necessary Condition}
\label{sec:minimal conditions}
The value $p$ allows everyone, including the adversary, to find the output of $\rep(\cdot, p)$ on any input $w'$. Ideally,  $p$ should not provide any useful information beyond this ability, and the outputs of $\rep$ on inputs that are too distant from $w$ should provide no useful information, either.  In this ideal scenario, the adversary is limited to trying to guess a $w'$ that is $t$-close to $w$. 
Letting $w'$ be the center of the maximum-weight ball in $W$ would be optimal for the adversary.
We therefore measure the quality of a source by (the negative logarithm of) this weight.

\begin{definition}
\label{def:fuzzy min-ent}
The $t$-fuzzy min-entropy of a distribution $W$ in a metric space $(\mathcal{M}, \dis)$ is:
\[
\Hfuzz(W) = -\log \left(\max_{w'}  \sum_{w\in W | \dis(w, w')\le t} \Pr[W=w] \right)
\]
\end{definition}
\noindent
Fuzzy min-entropy is a necessary condition for security:
\begin{proposition}
\label{prop:fuzz necessary}
Let $W$ be a distribution over $(\mathcal{M}, \dis)$ and let $n= \log |\mathcal{M}|$.
If $\Hfuzz (W) = \Theta(\log n)$ there is no $(\mathcal{M}, W, \kappa, t)$-fuzzy extractor %that is $(\max |\mathcal{M}| +  |\rep|, \epsilon)$-hard for $\epsilon = \ngl(n)$
 with error $\delta = \ngl(n)$ for $\kappa =\omega(\log n)$.
\end{proposition}
\begin{proof}
Let $W$ be a distribution where $\Hfuzz(W) = \Theta(\log n)$.  This means that there exists a point $w' \in \mathcal{M}$ such that $\Pr_{w\in W}[\dis (w, w')\leq t] \geq 1/\poly(n)$.  Consider the following distinguisher $D$:
\begin{itemize}
\item Input $\key, p$.
\item If $\rep(w', p) = \key$, output $1$.
\item Else output $0$.
\end{itemize}
Clearly, $\Pr[D(\Key, P) = 1]\geq 1/\poly(n) - \delta$, while $\Pr[D(U_\kappa, P)=1 ]= 2^{-\kappa}$.  Thus, when $\kappa = \omega(\log n)$:
\[
\delta^D((\Key, P), (U_\kappa, P))\geq \frac{1}{\poly(n)} -\delta -  \frac{1}{2^{\kappa}} = 1/\poly(n).
\]
Note that $D$ only provides an input and looks at the output, thus it extends to an interactive protocol.  Also, $D$ is of size $\max |\mathcal{M}|+ |\rep|$ where $\max |\mathcal{M}|$ is the longest description of an item in the metric space.  Thus, $D$ is also a distinguisher in the computational setting.
\end{proof}

\paragraph{Generalizing to correlated random variables}
Instead of considering $w, w'$ that have bounded distance, we treat $W, W'$ as a pair of correlated random variables.  Previous results in this setting are discussed in \secref{sec:correlated variables}.
  Fuzzy min-entropy can be generalized to this setting. Fuzzy extractors consider the worst case $w'$.  When considering correlated readings, it is natural to treat $W'$ as a random variable:\footnote{Fuzzy extractors are defined to require high probability of correctness for all pairs $w, w'$.  In the correlated setting, it may make sense to provide an average-case guarantee, where the probability of correctness is also over the draw of $w, w'$.  Renner and Wolf use a smoothed notion of entropy that removes the $\delta$ fraction of the probability mass of $W=w|W'=w'$ with the most points to improve parameters under such a definition.  In this work, we consider worst case correctness and use unsmoothed entropy.}

\begin{definition}
\label{def:cor fuzz min}
Let $(W, W')$ be a pair of correlated random variables.  The correlated fuzzy min-entropy of $W, W'$ is:
\[\Hcorr(W, W') = -\log \left( \max_{w'\in \supp(W')}\sum_{w\in W | \Pr[W=w|W'=w']>0} \Pr[W = w] \right).\]
\end{definition}
\noindent 
In \defref{def:fuzzy min-ent}, the sum is implicitly over $W=w|W'=w'$ since we assume any $w'$ within distance $t$ is possible.  
For now, we consider sufficiency of $\Hfuzz(W)$ for key derivation from noisy sources (\defref{def:fuzzy min-ent}). We then consider the implications of our results on the correlated reading setting (\defref{def:cor fuzz min}).

\section{Sufficiency of $\Hfuzz$ When the Algorithms Know the Distribution}
\label{sec:known distributions}
In this section, we consider fuzzy extractors that precisely know the input distribution $W$.  We call this setting the \emph{known-distribution} setting (see discussion after \defref{rem:discussion of known dist}).
We show it is possible to build known-distribution secure sketches~(and thus fuzzy extractors through \lemref{lem:fuzzy ext construction}) whenever $\Hfuzz(W)= \omega(\log n)$.
We first consider flat distributions and show that hashing maintains fuzzy min-entropy and suffices to disambiguate points.  We then turn to arbitrary distributions.  

\subsection{Flat Distributions}
A distribution is flat if all points in its support have the same probability. 
\begin{definition}
A distribution $W$ is \emph{flat} if for all $w_0, w_1 \in \supp(W)$,  $\Pr[W=w_0] = \Pr[W=w_1]$.  
\end{definition}

Denote the largest number of points in a ball of radius $t$ in the support of $W$ as $\beta_{t} = \max_{w'\in\M} |\{w | w\in \supp(W) \wedge \dis(w ,w')\le t\}|$.   For flat distributions, the weight of this maximum-probability ball (which determines $\Hfuzz(W)$ by \defref{def:fuzzy min-ent}) is proportional to the number of points in it.  More precisely, 
\begin{align}
\Hfuzz(W) &= -\log \left(\max_{w' \in \mathcal{M}} \left| \{w | w\in \supp(W) \wedge \dis(w, w')\le t\} \right|\cdot \Pr[W=w]\right) \nonumber \\
&= -\log\left( \max_{w' \in \mathcal{M}} |\{w | w\in \supp(W) \wedge \dis(w, w')\le t\}| \cdot 2^{-\Hoo(W)}\right) \nonumber \\
&= \Hoo(W) -\log \beta_{t}.\label{eq:fuzz for flat}
\end{align}

\noindent We use universal hashes to construct secure sketches for flat distributions.  Skoric et al.~constructed secure sketches from universal hashes to correct a polynomial number of error patterns~\cite{skoric2009efficient}.


\begin{definition}[\cite{DBLP:journals/jcss/CarterW79}]
Let $F : \mathcal{K} \times \mathcal{M} \to R$ be a function.  We say that $F$ is \emph{universal} if for all distinct $x_1, x_2 \in \mathcal{M}$:
\[
 \Pr_{K \leftarrow \mathcal{K}}[F(K, x_1) = F(K, x_2)] = \frac{1}{|R|} \;.
\]
\end{definition}

\begin{construction}
\label{cons:universal hash}
Let $F :\mathcal{K}\times \mathcal{M}\rightarrow R$ be a universal hash function.  Let $W$ be a distribution.  Define $\sketch_W, \rec_W$ as:

\begin{center}
\begin{tabular}{c|c}
\begin{minipage}{2.5in}
\textbf{$\sketch_W$}
\begin{enumerate}
\item \underline{Input}: $w$.
\item Sample $K\leftarrow \mathcal{K}$.
\item Set $p = F(K, w), K$.
\end{enumerate}
\vspace{.3in}
\end{minipage} &
\begin{minipage}{3.5in}
\textbf{$\rec_W$}
\begin{enumerate}
\item \underline{Input}: $(w', p = y, K)$
\item Let $W^* = \{w \in \supp(W) | \dis(w, w')\le t\}$.
\item For $w^*\in W^*$, if $F(K, w^*) = y$, \\ output $w^*$.
\item Output $\perp$.
\end{enumerate}
\end{minipage}
\end{tabular}
\end{center}
\end{construction}

\begin{lemma}
\label{lem:flat hashing}
Let $W$ be a flat distribution with $\Hoo(W)\ge m$.  Then
Construction \ref{cons:universal hash} is a $(\mathcal{M}, \{W\}, m - \log |R|, t)$-known distribution secure sketch with error $\delta \le \frac{\beta_{t}-1}{|R|}$. 
\end{lemma}
\begin{proof}
We first argue security.  Fix some $W\in\mathcal{W}$. Since $\mathcal{K}$ and $W$ are independent, $\Hav(W | \mathcal{K}) = \Hoo(W) = m$.  Then by \cite[Lemma 2.2b]{DBLP:journals/siamcomp/DodisORS08}, $\Hav(W | \mathcal{K}, F(\mathcal{K}, W)) \ge \Hoo(W) - \log |R| \ge m - \log |R|$.

We now argue correctness.  Fix some $w, w'$.  Let $W^*$ denote the set of elements in $W$ within distance $t$ of $w'$.  The size of $W^*$ is at most $\beta_{t}$.  Since $w, w'$ are independent of $\sketch$ this set is independent of the choice of $\mathcal{K}$.  The algorithm  $\rec$ will never output $\perp$ as the correct $w$ will match the hash.  The probability that another element $w^*$ collides is:
\begin{align*}
\Pr[\exists w^* \in W^* |w^* \neq w \wedge F(K, w^*) = F(K, w)] &\le \sum_{w^*\in W^* | w^*\neq w} \Pr[F(K, w^*) = F(K, w)] \\
 &= \sum_{w^*\in W^* | w^*\neq w} \frac{1}{|R|} \le \frac{\beta_{t}-1}{|R|}
\end{align*}
The inequality proceeds by union bound. The first equality proceeds by the universality of $F$ and the second inequality proceeds by noting the number of wrong neighbors is bounded by $\beta_{t}-1$.  This completes the proof.
\end{proof}

\begin{corollary}
Let $n = \log |\mathcal{M}|$.  If $|R| \ge \beta_{t}\cdot n^{\omega(1)}$ then \consref{cons:universal hash} is correct with overwhelming probability.  That is, setting $\log |R| = \log \beta_{t} + \omega(\log n)$ suffices.
\end{corollary}

\noindent
\consref{cons:universal hash} writes down enough information to disambiguate any ball of points.  The remaining entropy for this construction is 
$
\Hav(W |\sketch(W)) = \Hoo(W) - \log \beta_{t} - \omega(\log n).
$
For a flat distribution this is within a super-logarithmic factor of optimal~(see Equation~\eqref{eq:fuzz for flat}). By choosing $\delta$ based on $\Hfuzz(W)$ we build $(\sketch, \rec)$ such that $\Hav(W | \sketch(W)) = \omega(\log n)$.

\subsection{Arbitrary Distributions}
The worst-case hashing approach does not work for arbitrary sources.  The reason is that some balls may have  many points but low total weight. For example, let $W$ be a distribution consisting of the following balls. Denote by $B^1_t$ a ball with $2^{\Hoo(W)}$ points with probability $\Pr[W\in B^1_t] =2^{-\Hoo(W)}$.  Let $B^2_t,..., B^{2^{\Hoo(W)}}_t$ be balls with one point each with probability $\Pr[W\in B^i_t] = 2^{-\Hoo(W)}$.  Then the hashing algorithm needs to write down $\Hoo(W)$ bits to achieve correctness on $B^1_t$.  However, with probability $1-2^{-\Hoo(W)}$ the initial reading is outside of $B^1_t$, and the hash completely reveals the point.  

Dealing with non-flat distributions requires a new strategy. 
Many solutions for manipulating high entropy distributions leverage a solution for flat distributions and use the fact that high entropy distributions are convex combinations of flat distributions.  However, a distribution with high fuzzy min-entropy may be formed from component distributions with little or no fuzzy min-entropy.  It is unclear how to leverage the convex combination property in this setting.  

The main obstacle in the arbitrary setting is distinguishing between a setting where a ball has a few high probability points and a large number of low probability points.
To overcome this problem, we write the probability of $w\in W$ in the sketch output.  To ensure this information does not completely reveal $w$ we write down $\lfloor \log \Pr[W=w] \rfloor$. We then use a universal hash whose output length is proportional to the number of close points of the same probability as $w$.  This construction divides the distribution $W$ into probability levels.  Each level is nearly flat.  

\begin{construction}
\label{cons:leveling}
Let $\mathcal{M}$ be a metric space and let $n =\log |\mathcal{M}|$. Let $W$ be a distribution with $\Hoo(W)= m$.  Let $\ell\in\mathbb{Z}^+$ be a parameter.  Let $L_i = (2^{-(i+1)}, 2^{-i}]$ for $i=m,..., m+\ell$.  Let $F_i :\mathcal{K}_i\times \mathcal{M}\rightarrow R_i$ be a parameterized family of universal hash functions.  Define $\sketch_W, \rec_W$ as: 
\begin{center}
\begin{tabular}{c|c}
\begin{minipage}{3in}
\textbf{$\sketch_W$}
\begin{enumerate}
\item \underline{Input}: $w$.
\item If $\Pr[W=w] \le 2^{-(m+\ell)}$, set $ss=0,w$.
\item Else
\begin{enumerate}
\item Find $i$ such that $\Pr[W=w]\in L_i$.
\item Sample $K\leftarrow \mathcal{K}_i$.
\item Set $ss =1,  i, F_i(K, w), K$.
\end{enumerate}
\end{enumerate}
\vspace{.28in}
\end{minipage} &
\begin{minipage}{3.5in}
\textbf{$\rec_W$}
\begin{enumerate}
\item \underline{Input}: $(w', ss)$
\item If $ss_0 = 0$, output $ss_{1,\dots, |ss|}$.
\item Else
\begin{enumerate}
\item Parse $(i, y, K) = ss_{1,\dots, |ss|}$.
\item $W^* = \{w \in \supp(W) |\\ \dis(w, w')\le t,\\ \Pr[W=w]\in L_i\}$.
\item For $w^*\in W^*$, \\if $F_i(K, w^*) = y$, output $w^*$.
\item Output $\perp$.
\end{enumerate}
\end{enumerate}
\end{minipage}
\end{tabular}
\end{center}
\end{construction}

\noindent
We extend our notation for the maximum likelihood ball to the leveled case.  Define $\beta_{t,i}$ as the maximum number of points in a ball in level $i$.  That is,
\[
\beta_{t,i} = \max_{w' \in \mathcal{M}} \left|\{w | w\in \supp(W) \wedge \dis(w, w')\le t \wedge \Pr[W=w]\in L_i\}\right|.
\]

\begin{theorem}
\label{thm:layered hashing}
Let $W$ be a distribution over $\mathcal{M}$ where $n =\log \M$.  Let $\delta>0$ be a function of $n$.  Let $F_i: \mathcal{K}_i \times \mathcal{M}\rightarrow R_i$ be a parameterized family of universal hash functions where $|R_i| = (\beta_{t,i}-1) /\delta$.  When $\ell = n$ \consref{cons:leveling} is a $(\mathcal{M}, \{W\}, \tilde{m}, t)$-known distribution secure sketch with error $\delta$ for $\tilde{m} = \Hfuzz(W) - \log n - \log 1/\delta - 3$.

\end{theorem}
\begin{proof}
Throughout the proof we assume that $\ell = n$ is the number of levels.  The proof can be carried out for an arbitrary $\ell$ but it leads to a complicated theorem statement.

\noindent
\textbf{Correctness:}  Fix some $w, w'$.  If $\Pr[W=w]\le 2^{-(m+\ell)} = 2^{-(m+n)}$, then $w$ is simply transmitted to $\rec$ and correctness is clear.  When $\Pr[W=w]> 2^{-(m+n)}$ let $L_i^*$ be the level of $\Pr[W=w]$.

Let $W^*$ denote the set of elements of $W$ in $L_i$ within distance $t$ of $w'$.  The size of $W^*$ is at most $\beta_{t,i}$. The choice of $w, w'$ is independent of $\sketch$, so this set is independent of $\mathcal{K}_i$~(it does affect the value of $i$ but not the particular outcome from $\mathcal{K}_i$).  The probability that another element $w^*$ matches the hash is:
\begin{align*}
\Pr[\exists w^* \in W^* |w^* \neq w \wedge F(K, w^*) = F(K, w)] &\le \sum_{w^*\in W^* | w^*\neq w} \Pr[F(K, w^*) = F(K, w)] \\
 &= \sum_{w^*\in W^* | w^*\neq w} \frac{1}{|R_i|} \le \frac{\beta_{t,i}-1}{|R_i|} = \delta
\end{align*}
The inequality is by union bound. The first equality follows from the universality of $F$.  The second inequality follows since the number of wrong neighbors is bounded by $\beta_{t,i}-1$.  

\noindent
\textbf{Ideal Adversary with access to Level Information:} To aid in the argument in security, we show the level information on its own is not too harmful.

The best strategy for an adversary that receives $i$ is to guess a point that has the most nearby weight in that level.  The adversary chooses \[w^*= \argmax_{w^*\in \mathcal{M}}\Pr_{w\in W | 2^{-(i+1)}< \Pr[W=w]\le 2^{-i}\wedge \dis(w, w^*)\le t}[W=w].\] The success of this adversary is at least $2^{-(i+1)}\beta_{t,i}$ as there are $\beta_{t,i}$ nearby points in that layer each with probability at least $2^{-(i+1)}$.  There are $n$ outcomes for $i$. The overall success of such an adversary is at most $n$ better than an adversary without such input~(by~\cite[Lemma 2.2]{DBLP:journals/siamcomp/DodisORS08}).  That is, 
\begin{align}
\expe&_{i | m\le i \le m+n}2^{-(i+1)}\beta_{t,i}\nonumber \\
&\le \expe_{i | m\le i \le n+m}\left( \max_{w^*\in W}\sum_{w\in W| 2^{-(i+1)}< \Pr[W=w]\le 2^{-i} \wedge \dis(w, w^*) \le t}\Pr [W=w]\right) \nonumber\\
&\le n \left(\max_{w^*\in W} \sum_{w\in W | \dis(w, w^*)\le t} \Pr[W=w]\right)\nonumber \\
&= n 2^{-\Hfuzz(W)}\label{eq:link fuzz 3}
\end{align}
\textbf{Security:}
We now argue security.  First note that the total weight of points whose probability is less than $2^{-(n+m)}$ is at most $2^{-m}$~(there are at most $2^n$ points in the distribution).  Let $1_{\text{low}}$ be the indicator random variable for $\Pr[W=w]\le 2^{-(n+m)}$.  Then 
\begin{align*}
\Hav(W | \sketch(W)) &= -\log \left(\Pr[1_{\text{low}}=1] \cdot 1 + \Pr[1_{\text{low}} =0]   2^{-\Hav(W | \sketch(W) \wedge 1_{\text{low}} = 0)}\right)\\
&\ge -\log\left( 2^{-m} + (1-2^{-m})2^{-\Hav(W | \sketch(W) \wedge 1_{\text{low}} = 0)}\right)
\end{align*}
For the remainder of the proof, we seek a bound on 
\[
2^{-\Hav(W | \sketch(W) \wedge 1_{\text{low}} =0)} = \max_{w\in W | 2^{-(n+m)}<\Pr[W=w]}\Pr[W=w | \sketch(W)].
\]
We separate out this quantity into levels:
\begin{align*}
\max&_{w\in W | \Pr[W=w]>2^{-(m+n)}}\left(\Pr[W=w | \sketch(W)]\right)\\ &= \expe_{i | m\le i \le m+n} \left(\max_{w\in W | \Pr[W=w]\in L_i} \Pr[W=w | \sketch(W), i]\right)\\
&= \expe_{i | m\le i \le m+n} \left(\max_{w\in W | \Pr[W=w]\in L_i} \Pr[W=w]\cdot 2^{|\sketch(W)|i|}\right)\\
&\le \expe_{i | m\le i \le m+n} \left(\max_{w\in W | \Pr[W=w]\in L_i} \Pr[W=w]\cdot 2^{H_0(\sketch(W) | i)}\right)\\
&\le \expe_{i | m\le i \le m+n} \left(2^{-i}*\beta_{t,i}/\delta\right)
\le\frac{ \expe_{i | m\le i \le m+n} \left(2^{-(i+1)}\cdot \beta_{t,i}\right)}{2\delta}\\
&= \frac{n 2^{-\Hfuzz(W)}}{2\delta}.
\end{align*}
Where the last line follows by Equation~\eqref{eq:link fuzz 3}.
Combining both cases we have:
\begin{align*}
\Hav(W | \sketch(W)) &= -\log \left(2^{-m}+\frac{(1-2^{-m})(n)2^{-\Hfuzz(W)}}{2\delta}\right)\\
&\ge -\log \left(2\max\left\{2^{-m}, \frac{(1-2^{-m}) n2^{-\Hfuzz(W)}}{2\delta}\right\}\right)\\
&\ge \Hfuzz(W) - \log n + \log \delta - \log (1-2^{-m}) - 2\\
&\ge \Hfuzz(W) - \log n + \log \delta - 3
\end{align*}
Where the third line follows from the second because $\Hfuzz(W)\le \Hoo(W) = m$~(so the maximum is achieved by the second operand). The last line follows from the third because if $m\ge 1$ then $-\log (1-2^{-m})\le 1$ and if $m< 1$ the entire bound is vacuous as $\Hfuzz(W)< 1$.
\end{proof}

\begin{corollary}
\label{cor:extension to fuzz ext}
Let $\mathcal{M}$ be a metric space where $n = \log |\M|$.
For any distribution $W$ over $\mathcal{M}$ with $\Hfuzz(W)=\omega(\log n)$, there exists a $(\mathcal{M}, \{W\}, \tilde{m}, t)$-known distribution secure sketch with $\tilde{m} = \omega(\log n)$ and $\delta = \ngl(n)$.  (Extendible to a fuzzy extractor using~\lemref{lem:fuzzy ext construction}.)
\end{corollary}


\paragraph{Connection to the characterization of~\cite{renner2005simple}}
Renner and Wolf characterize when it is possible to derive keys from correlated random variables~\cite[Theorem 3]{renner2005simple}.  They consider all possible (randomized) transforms $T, T'$ of $W, W'$ into a new pair of variables $V,V'$.  They show that \[|\key| \le \sup_{(V, V')\leftarrow (T(W), T'(W'))}\left(  \Hoo(V | T') - \log \max_{v'\in V'} |\{v| \Pr[V=v|T' \wedge V'=v']>0\}|\right).\]  Furthermore, they show that there is a transformation that achieves a key of nearly this length.  The result is nonconstructive as there is no guidance on how to find the transforms $T, T'$.  Since there is no known bound on the length of $T, T'$ it is not clear how to search the transform space even with unlimited time.

\consref{cons:leveling} can be used to derive keys from correlated random variables.  The main change is to define \[W^* = \{w | \Pr[W=w | W'=w'] >0 \wedge \Pr[W=w]\in L_i\}.\]  
Our result shows if one is satisfied with obtaining a strong key when possible (our protocol has losses of $2\log 1/\epsilon + \log n+\log 1/\delta$), then a protocol is possible (and explicitly constructible) in the original space. 


\section{Impossibility of Secure Sketches for a Family with $\Hfuzz$}
\label{sec:family of dist}

In the previous section, we showed the sufficiency of $\Hfuzz(W)$ for known distribution algorithms.  Unfortunately, it is unrealistic to assume that $W$ is completely known.  Traditionally, algorithms deal with this uncertainty by providing security for a family of distributions $\mathcal{W}$.  

In this section, we show uncertainty of $W$ comes at a real cost.  The security game of a fuzzy extractor can be thought of as a three stage process: 1) the challenger specifies $(\sketch, \rec)$, 2) the adversary sees $(\sketch, \rec)$ and specifies $W\in \mathcal{W}$ 3) the adversary wins if $\Hav(W|\sketch(W))< \tilde{m}$.  
We prove impossibility in a game that is harder for the adversary to win: 1) the challenger specifies $(\sketch, \rec)$ 2) the adversary samples a random distribution from $W\leftarrow \mathcal{W}$ 3) the adversary wins if $\Hav(W|\sketch(W)) < \tilde{m}$.  

Let $V$ be the process of uniformly sampling $W\leftarrow \mathcal{W}$ and then sampling $w\leftarrow W$.  Let the random variable $Z$ indicate which $W$ was sampled.  The view of the challenger is $V$, while the view of the adversary is a distribution $V|Z$.  Our results rule out security for an average member of $\mathcal{W}$.  It may be possible to improve parameters by ruling out only a worst case $W$.  In \chapref{sec:def equiv}, we show that providing security for a family $\mathcal{W}$ is equivalent to providing security for all distributions over that family.
We now show a family of distributions $\mathcal{W}$ that does not admit a secure sketch.  Our negative results in this chapter are specific to the Hamming metric.

\begin{theorem}
\label{thm:imposs sketch}
Let $n$ be a security parameter.  There exists a family of distributions $\mathcal{W}$ over $\mathbb{F}^\gamma$~(for a finite field $\mathbb{F}$ specified in the proof) such that for each element $W\in \mathcal{W}$, $\Hfuzz(W)= \omega(\log n)$, and yet for any $(\mathcal{M}, \mathcal{W}, \tilde{m}, t)$-secure sketch $(\sketch, \rec)$ with error $\delta <1/4$ and distance $\gamma > t\ge 4$, the remaining entropy $\tilde{m}<2$.  

Furthermore, this is true on average.  Let $V$ be process of uniformly sampling $W\leftarrow \mathcal{W}$ and sampling $w\leftarrow W$, and let $Z$ indicate which $W$ is sampled.  Then
\[
\Hav(V|\sketch(V), Z)< 2.
\]
\end{theorem}

\begin{proof}
We prove the stronger average case statement.
We first describe a family $\mathcal{W}$.  Let $\mathbb{F}$ be some field of size $q =\omega(\poly(n))$.  
Let $\mathcal{W}$ be the set of all distributions of the form 
\[W =  \begin{pmatrix} \vec{1} \\a_2  \\ \vdots \\ a_{\gamma} \end{pmatrix} W_1 + \begin{pmatrix} 0  \\ 
b_2\\ \vdots \\ b_{\gamma} \end{pmatrix} 
\]
where $W_1$ is uniform and $W_i = a_i W_1 + b_i$ for $2\le i \le \gamma$ and $a_i, b_i\in\mathbb{F}, a_i\neq 0$.  
This type of distribution is an affine line in space $\mathbb{F}^\gamma$.  Define $V$ as the process of uniformly choosing $W\leftarrow \mathcal{W}$ and then sampling from $w\leftarrow W$.  The adversary sees $\sketch(V)$ and $Z$.  $Z$ is the description of the line $Z = a_2, b_2, ..., a_\gamma, b_\gamma$. The algorithms $\sketch, \rec$ never see $Z$.
Fix some $4\le t < \gamma$.
We show the following:

\begin{itemize}
\item \propref{prop:each element good}: for all $W\in \mathcal{W}$, $\Hfuzz(W) = \omega (\log n)$. That is, $\forall z, \Hfuzz(V | Z=z) = \omega(\log n)$.
\item \propref{prop:distribution uniform}: the distribution $V$ is uniform.
\item \lemref{lem:secure sketch entropy loss}: for any secure sketch on $V$, the support size of $V | \sketch(V)$ decreases significantly.  Here we show the minimum distance of $V|\sketch(V)$ is at least $t$.
\item \lemref{lem:side info determines sketch}:  for most lines $Z$, the intersection of the support of $V|Z$ and $V | \sketch(V)$ is small.  That is, $\tilde{H}_0(V | \sketch(V), Z) < 2$.
\end{itemize}
%Let $c'\leftarrow \neigh_t(c)$ sample a uniform point within distance $t$ of $c$.  
The proof of \thref{thm:imposs sketch} uses Shannon codes (\defref{def:shannon-code}).
We now prove each item in the above outline.

\begin{proposition} 
\label{prop:each element good} For each $W\in\mathcal{W}$, $\Hfuzz(W) = \omega(\log n)$.
\end{proposition}
\begin{proof}
Consider some $W\in\mathcal{W}$.  The value $w_1$ is uniform in a field of size $\omega(\poly(n))$, so $\Hoo(W) =\omega(\log n)$.  We now show that for any distinct $w, w'\in \supp(W)$, $\dis(w, w') = \gamma>t$.  This shows that $\Hfuzz(W) = \Hoo(W)$.  Fix some distinct $w, w'\in \supp(W)$.  Clearly, $w_1 \neq w_1'$.  For any $i$, $w_i = a_i w_1 + b_i$ and $w_i' = a_i w_1' + b_i$.  Since $a_i\neq 0$, $a_i w_1 \neq a_i w_1'$ and thus $a_i w_1+b_i \neq a_i w_1'+b_i$.  That is, $\dis (w, w')  =\gamma$.
\end{proof}

\begin{proposition}
\label{prop:distribution uniform} $V$ is the uniform distribution over $\mathbb{F}^\gamma$.
\end{proposition}
\begin{proof}
Consider some $w\in V$.  Then $w$ was drawn from some intermediate distribution $W$ with coefficients $a_2, b_2, ..., a_\gamma , b_\gamma$.  The value $w_1$ is uniformly random and $w_i$ are uniformly random since $b_2,..., b_\gamma$ are uniformly random.
\end{proof}


\begin{lemma}
\label{lem:secure sketch entropy loss}
Fix some $\sketch, \rec$ algorithm with error $\delta < 1/4$, then $\tilde{H}_0(V | \sketch(V)) \le (\gamma-t+1)\log q+1$.
\end{lemma}
\begin{proof}
We assume that $\rec$ is deterministic in our analysis.  Any randomness necessary for the \rec algorithm can be provided by \sketch.  This is the same as considering $\rec$ that outputs any coin it flips.  Since $w, w'$ are independent of $p$ this does not affect correctness.  Security is defined based on the output of $\rec$ so outputting the coins of $\rec$ does not affect security.
By the definition of correctness for $(\sketch, \rec)$, 
\[
\forall w, w', \Pr_{ss\leftarrow \sketch(w)} [\rec(w', ss) \neq w] < 1/4.
\]
%For most $p$, $\rec$ works on most neighbors of $w$.  
Fix some $w$.  
By Markov's inequality, there exists a set $A_{ss}$ such that $\Pr[ss\in A_{ss}]\ge 1/2$ and $\forall ss\in A_{ss}$, 
\[
\frac{\left|\{w' | \dis (w', w)\le t \wedge \rec(w', ss) \neq w\}\right|}{\left|\{w' | \dis (w', w)\le t\}\right|}\le 2\delta < 1/2.\]

Consider some $ss^*\in A_{ss}$.  We now show that $H_0(V | \sketch(V) = ss^*) \le (\gamma-t+1)\log q$.  For the sketched value $w$, the fraction of points $w'$ with $\dis(w, w') \le t$ for which $\rec(w', ss^*) \neq w$ is at most $2\delta$.  

For every value in $V|\sketch(V) = ss^*$ this is also true.  This makes the support of $V|\sketch(V)=ss^*$ a $(t, 2\delta)$-Shannon code~(see \defref{def:shannon-code}).  This implies that for all $w_1, w_2 \in V|\sketch(V)=ss^*$, $\dis(w_1, w_2)\ge t$~(since $2\delta< 1/2$).  That is $V|\sketch(V)=ss^*$ is a set with minimum distance at least $t$.  

By the Singleton bound, this implies that $H_0(V |\sketch(V)=ss^*) \le (\gamma -t+1 )\log q$.  Averaging over $\sketch(V)=ss^*$ one has that $\tilde{H}_0(V|\sketch(V)) \le (\gamma -t +1) \log q +1$.
\end{proof}

\begin{lemma}
\label{lem:side info determines sketch}
$\Hav(V | \sketch(V), Z) <2$.
\end{lemma}
\begin{proof}
Recall that $Z$ consists of $2(\gamma-1)$ coefficients and there are $(q-1)^{\gamma-1} q^{\gamma-1}$ equally likely values for $Z$.
 As described above, the view of $\sketch, \rec$ is a uniform distribution $V$.  %Having seen $V$ there are many possible values for $Z$.  Furthermore, the distributions $V|Z$ have disjoint support outside of the observed point.  
 The only information seen by $\sketch$ algorithm is in the point $V=v$.  The length of this point is $\gamma \log q$.  Conditioned on this information there are still many possible values for $Z$.  That is, 
 \[
 \forall v, H_0(Z | V=v) =\log \left(\frac{(q-1)^{\gamma-1} q^{\gamma-1}}{q^\gamma}\right) = \log \left( (q-1)^{\gamma-1}/q\right).
 \]
Consider two possible $z_1, z_2$ that are possible values of $Z$~(having seen $v$).  The distributions $V| Z=z_1$ and $V | Z=z_2$ intersect at one point~(namely $v$).  

We now show for any sketch algorithm there are few possible values of $V|Z$ in the support of $V |\sketch(V)$.  The distributions $V | Z=z_1$ and $V| Z=z_2$ for possible $z_1, z_2$~(having seen $v$) overlap only at the point $v$.  This means for any $v^*\in V| \sketch(V)$ (other than the true $v$) there is at most one $z$ such that $v^*\in V | \sketch(V), Z=z$.  

The optimum strategy is to include these values uniformly from different $Z$ values.
We show this across different sketch values.  Consider some fixed sketch value $s$ and let $h_s= H_0(V | \sketch(V) = s)$.  %That is, there are $2^{h_s}$ possible values for $V$ conditioned on $\sketch(V) = s$.  
Recall that 
\[
\tilde{H}_0(V | \sketch(V)) =  \log \expe_{s\in \sketch(V)} 2^{H_0(V | \sketch(V) = s)}  = \log \expe_{s\in \sketch(V)} 2^{h_s} %\le   (\gamma-t+1)\log q+1.
\]  
Conditioned on seeing the point $V$ there are $(q-1)^{\gamma-1}/q$ possible values for $Z$ with disjoint support outside of the sketched point.  Consider these possible values for $Z$ as containers to be filled with the $2^{h_s}$ items~(possible values of $V | \sketch(V)=s$).  Each container automatically receives one free point~(all the distributions share $v$).  The average number of items in each container is maximized when the containers are filled equally.  That is, the average number of items in each container is bounded by the number of items divided by the number of containers.  That is, 
\begin{align*}
\tilde{H}_0(V |Z  , \sketch(V) = s) &\le \log \left(\frac{\text{\# items}+\text{\# containers}}{\text{\# containers}}\right)\\
&= \log \left(\frac{2^{h_s}q}{(q-1)^{\gamma-1}} +1 \right)
\end{align*}
Then averaging over the possible values of $s$, we have the following as long as $t\ge 4$~(using  \lemref{lem:log-minus-one}, which appears below):
\begin{align*}
\tilde{H}_0(V |Z , \sketch(V) ) &= \log \expe_{s\in \sketch(V)} 2^{\tilde{H}_0(V | Z, \sketch(V) =s )}\\
&= \log\expe_{s\in \sketch(V)} \left(\frac{2^{h_s}q}{(q-1)^{\gamma-1}} +1\right)\\
&\le \max\left\{ \log \left(\frac{q}{(q-1)^{\gamma-1}} \expe_{s\in \sketch(V)} 2^{h_s}\right)+1, 1\right\}.
\end{align*}
Where the inequality follows because $\log x+1 \le \max\{ 1+ \log x,1\}$ for $x\ge 0$.
The left operand to $\max$ is bounded by $2$~(bounding the $\max$ by $2$):
\begin{align*}
\log &\left(\frac{q}{(q-1)^{\gamma-1}} \expe_{s\in \sketch(V)} 2^{h_s}\right)+1\\
&=\log q - (\gamma -1)\log (q-1) + \log \left(\expe_{s\in \sketch(V)} 2^{h_s}\right) +1\\
&=\log q - (\gamma -1)\log (q-1) + \tilde{H}_0(V | \sketch(V)) +1 \\ 
&\le \log q - (\gamma -1)\log (q-1) + (\gamma-t+1)\log q+2\\
&\le (\gamma-t+2)\log q - (\gamma-1) \log (q-1)+2\\
&< (\gamma-t+2)\log q - (\gamma-2) \log q +2 \ \ \ \ \mbox{(by Lem \ref{lem:log-minus-one})}\\
&\le (4-t)\log q +2< 2\,.
\end{align*}
\end{proof}

\begin{lemma}
\label{lem:log-minus-one}
For any real numbers $\alpha \leq \eta$ with $\eta \ge e+1$ (in particular, $\eta\ge 4$ suffices), the following holds:
$\alpha \log (\eta-1) > (\alpha-1)\log \eta$. 
\end{lemma}

\begin{proof}
Because $\eta-1$ is positive, and $1+x<e^x$ for positive $x$,
$$1+\frac{1}{\eta-1} < e^{\frac{1}{\eta -1}}\,.$$  Therefore, 
$$\left(1+\frac{1}{\eta-1}\right)^{\alpha-1} < e^{\frac{\alpha-1}{\eta-1}}\le e < \eta-1$$ (since $\alpha\le \eta$). Multiplying both sides by $(\eta-1)^{\alpha-1}$, we obtain
$$\eta^{\alpha-1} < (\eta-1)^\alpha\,.$$
Taking the logarithm of both sides yields the statement of the lemma.
\end{proof}


\end{proof}

\noindent
\textbf{Note:} There is a tradeoff between the size of $\mathbb{F}$ and the error tolerance required for the counter example.  By increasing $t$ it is possible to show a counter example for a smaller $\mathbb{F}$.

\section{Impossibility of Fuzzy Extractors for a Family with $\Hfuzz$}
\label{sec:imposs fuzz ext}
In the previous section, we showed a family of distributions that does not admit a secure sketch.  We provide a similar result for fuzzy extractors.  

\begin{theorem}
\label{thm:imposs fuzz ext}
Let $n$ be a security parameter.  There exists a family of distributions $\mathcal{W}$ over $\zo^n$ satisfying the following conditions. For each element $W\in \mathcal{W}$, $\Hfuzz(W)= \omega(\log n)$. Let $\kappa \ge 2$ and $t = \omega(n^{1/2}\log n)$.  Any $(\mathcal{M}, \mathcal{W}, \kappa, t, \epsilon)$-fuzzy extractor with error $\delta = 0$ has $\epsilon > 1/8 - \ngl(n)$.

Furthermore, this is true on average.  Let $V$ be process of uniformly sampling $W\leftarrow \mathcal{W}$ and sampling $w\leftarrow W$ and let $Z$ indicate which $W$ is sampled.  Let $(\Key, P)\leftarrow \gen(V)$.  Then, 
\[
\sd ((\Key,P, Z), (U_\kappa, P, Z))>1/8-\ngl(n)\,.
\]

\end{theorem}

\begin{proof}[Proof Outline]
We prove the stronger average case statement.
Let $\nu = \omega(\log n)$ and $\nu = o(n^{1/4})$.  Let $t=4\nu n^{1/2}$ and note that $n/\nu >t$ for sufficiently large $n$.  

Our counterexample uses a slightly different family of distributions $\mathcal{W}$ than the counterexample for secure sketches.  
We will work over a binary alphabet~(we used a large alphabet in our counterexample for secure sketches).  A  property of the binary Hamming space is that a large fraction of any set of bounded size is near the ``boundary'' of that set.  This will be crucial in our proof.  We will embed the larger alphabet we used into the binary Hamming metric.
Let $x_1,..., x_\nu \in \zo^\nu$.  Let $\mathbb{F}$ denote the field of size $2^{\nu}$.  Let $a_2,..., a_{n/\nu}\in\mathbb{F}$ such that $a_i\neq 0$ and let $b_2,..., b_{n/\nu}\in\mathbb{F}$.  
Interpret $x_1,..., x_{\nu}$ as an element $x\in \mathbb{F}$ and let 
\[w =  \begin{pmatrix} \vec{1} \\a_2  \\ \vdots \\ a_{n/\nu} \end{pmatrix} x + \begin{pmatrix} 0  \\ 
b_2\\ \vdots \\ b_{n/\nu} \end{pmatrix} .
\]
The multiplication is in $\mathbb{F}$.
Define a distribution $W$ as the uniform distribution over values of $x$ for a particular value of $a_2,..., a_{n/\nu}$, $b_2,..., b_{n/\nu}$.  
Let $\mathcal{W}$ be the set of all such $W$.  

 Define $V$ as the process of uniformly choosing $W\leftarrow \mathcal{W}$ and then sampling $w\leftarrow W$.  The adversary sees $P$~(the public value output by $\gen(V)$)  and $Z$, where $Z$ is the description of the line $Z = a_2,..., a_{n/\nu}, b_2, ..., b_{n/\nu}$.
 
We now present an outline of the proof~(formal statements and proofs follow):
\begin{itemize}
\item \propref{prop:dist fuzzy ent fuzz}: for all $W\in \mathcal{W}$, $\Hfuzz(W) = \omega (\log n)$. That is, $\forall z, \Hfuzz(V | Z=z) = \omega(\log n)$.
\item \propref{prop:dist uniform fuzz}: the distribution $V$ is uniform.
\item \lemref{lem:fuzz can't get key}: In expectation across $Z$, there is a large subset of keys that are not possible.  In more detail,
\begin{itemize}
\item Half the keys have at most $2^{n- \kappa}$ preimages in the metric space~(this is at most half the metric space).  Denote this set as $R_{sml}$.  
\item Consider some $\key \in R_{sml}$.  %Our goal is to show there are few values of $v \in V|Z$ that could produce this key.  
Consider the set $V_{\key } = \{w | \rep(w, p) = \key \}$.  All points in $V | P$ are at distance at least $t$ from the boundary of $V_\key$~(the functionality of $\rep$ guarantees that for the true $w$ all nearby points map to the same $\key$).  We show that most of $V_\key$ is near its boundary.  A result of Frankl and F{\"u}redi says that the boundary of a region is minimized by a ball containing the same number of points~\cite{frankl1981short}.  Hoeffding's inequality says that most of a ball lies near its boundary~\cite{hoeffding1963probability}.  Together these two results imply that the interior of $V_\key$ is small.
\item As before, there are many possible values for $z_1, z_2$ for the side information $Z$~(and these possible values are equally likely).  Furthermore, the distributions $V|Z=z_1 $ and $V| Z=z_2$ have disjoint support outside of $v$.
\item For most possible values of $Z$, the intersection between the viable preimages of $V|Z$ and $V_\key$ contains at most one point~(the received point $v$).  Checking whether $V|Z \cap V_{\key}$ is nonempty is an effective distinguisher.
\end{itemize}
\end{itemize}
\end{proof}
\begin{proposition} 
\label{prop:dist fuzzy ent fuzz}
For each $W\in\mathcal{W}$, $\Hfuzz(W) = \omega(\log n)$.
\end{proposition}
\begin{proof}
Consider some fixed $W\in\mathcal{W}$.  The bits $w_{1,..., \nu}$ are uniform, so $\Hoo(W) =\omega(\log n)$.  Recall that $t=o (n/\nu)$. 
 %We now show that for any $w, w'\in W$, $\dis(w, w') \ge n/\nu$ and thus $\Hfuzz(W) = \Hoo(W)$.  
Fix some $w, w'\in W$.  Denote by $x, x'$ the values that produce $w, w'$ respectively.  Clearly, $x\neq x'$.  Thus, for any $i$, $a_i x + b_i \neq a_i x' + b_i$.  This implies that $w_{i\nu+1,...., (i+1)\nu} \neq w'_{i\nu+1,..., (i+1)\nu}$. That is, at least one of the bits in each block differs between $w$ and $w'$, and so $\dis(w, w') \ge n/\nu$. Since no two values in the support of $W$ lie in the same ball of radius $t$, we have $\Hfuzz(W) = \Hoo(W)= \omega(\log n)$.
\end{proof}

\begin{proposition}\label{prop:dist uniform fuzz}
$V$ is the uniform distribution over $\mathbb{F}^\gamma$, where $\gamma = n/\nu$~(equivalently, over $\zo^n$).
\end{proposition}
\begin{proof}
Consider some $w\in V$ over $\zo^n$.  Then $w\leftarrow W$ with coefficients $a_2, ...,a_\gamma$ and $b_2, ... , b_\gamma$.  The value $w_{1,...,\nu} =x $ is uniformly random and $w_{i\nu+1,...,(i+1)\nu}$ are uniformly random since $b_2,..., b_\gamma$ are random.
\end{proof}

\begin{lemma}
\label{lem:fuzz can't get key}
Fix some $(\gen, \rep)$ algorithm with $\kappa \ge 2$.  There exists an information-theoretic distinguisher between $(\Key, P, Z)$ and $(U_\kappa, P, Z)$ with advantage $\epsilon = 1/8-\ngl(n)$.
\end{lemma}
\begin{proof}
As in the proof of \thref{thm:imposs sketch}, we assume that $\rep$ is deterministic.  Denote by $(\Key, P) \leftarrow \gen(V)$.
By Markov's inequality, there exists a set $A_{p}$ such that $\Pr[p\in A_{p}]\ge 1/2$ and $\forall p\in A_{p}$, 
\[
(\Key |P =p, P = p ) \approx_{2\epsilon} (U_\kappa , P =p).
\]
%\{w' | \dis (w', w)\le t \wedge \rec(w', p) \neq w\}\le 2\delta < 1/2.\]

Consider some $p^*\in A_{p}$.  %Since $(R | P=p^*, p^*)\approx_{2\epsilon} (U, p^*)$ this means that $R|P=p^*$ is at least $(1-2\epsilon)2^\kappa$.  
The support of the distribution $\Key|P=p^*$ is the set of possible keys.
The map $\rep(\cdot, p^*)$ induces a partition on the metric space.  That is, for every $w\in\mathcal{M}$, there exists a unique value $\key$ such that $\rep(w, p^*) =\key$.  Denote this partition by $Q_{p^*,\key} = \{w | \rep(w, p^*) = \key\}$.  

There exists a set $R_{sml}$  where $|R_{sml} | \ge 2^{\kappa-1}$ such that for all $\key\in R_{sml}$,  $|Q_{p^*, \key}|\le |\mathcal{M}|/2^{\kappa} = 2^{n-\kappa }$.  If not, then $\left|\bigcup_{\key} Q_{p^*, \key}\right| > |\mathcal{M}|$, a contradiction.
For the remainder of the proof we restrict ourselves to elements in $R_{sml}$.  Only points that are at distance more than $t$ from all points outside of $Q_{p^*, \key}$ are viable points in the metric space.  These form the interior of $Q_{p^*, \key}$:
\begin{align*}
\inter(Q_{p^*, \key}) = \{w \mid \rep(w, p^*) = \key \wedge \forall w' \mbox{ s.t. } \dis(w, w') \le t, \rep(w', p^*) =\key\},\\
%\crust(Q_{p^*, r}) = \{w | \rep(w, p^*) = r \wedge \exists w', \dis(w, w')\le t \wedge \rep(w', p^*) \neq r\}.
\end{align*}
We will use the term deficient ball\footnote{In most statements of the isoperimetric inequality, this type of set is simply called a ball.  We use the term deficient ball for emphasis.}:
\begin{definition}
A set $S$ is an $\eta$-deficient ball if there exists a point $x$ such that $B_{\eta-1}(x) \subseteq S \subseteq B_{\eta}(x)$.
\end{definition}

Consider some $\key^*\in R_{sml}$.  
We now  show that the interior of each $Q_{p^*, \key^*}$ is small:
%Recall that $B_{n-\kappa-t}$ denotes the ball of radius $n-\kappa -t$.

\begin{lemma}
$|\inter(Q_{p^*, \key^*})| \le 2^{n-4\nu}$.
\end{lemma}
\begin{proof}
By the isoperimetric inequality on the Hamming space~(we use a version due to~\cite[Theorem 1]{frankl1981short}, the original result is due to Harper~\cite{harper1966optimal}), there exists an $\eta$-deficient ball $S_{p^*, \key^*}$ centered at $0$ and a set $D$ such that $|S_{p^*, \key^*}| = |\inter(Q_{p^*, \key^*})|$, $|D| = |Q_{p^*, \key^*}^\complement|$ and $\forall s\in S_{p^*, \key^*}, d\in D$, $\dis(s, d) \ge t$~(alternatively, the distance between the sets is at least $t$).  Furthermore, note that $S_{p^*, \key^*} \cup D$ is a deficient ball~(and its radius is $\eta+t$).
We now bound the size of $S_{p^*, \key^*}$.

Recall that $|S_{p^*, \key^*} \cup D| = |Q_{p^*, \key^*} | \le 2^{n-\kappa}\leq |\mathcal{M}|/2$.  Since this set contains at most half the points in the metric space, we know its radius is at most $n/2$.  This means that $S_{p^*, \key^*}$ is a deficient ball of radius at most $n/2-t$.  Let $X$ denote a uniform string on $\zo^n$.  We use Hoeffding's inequality~\cite{hoeffding1963probability}:

\begin{align*}
|S_{p^*, \key^*}| \le |\{ x \mid \dis (x, 0)\le n/2-t\}| &= 2^n \Pr_{X\leftarrow \zo^n} [ \weight(X) \le (1/2-t/n)n] \\
&\le 2^n e^{-n ((t/n)^2)} = 2^n e^{-16\nu^2} \le 2^{n - 4\nu}\,.
\end{align*}
\end{proof}

We have shown that $|\inter(Q_{p^*, \key^*})| \le 2^{n-4\nu}$.  
To complete the proof it suffices to show that for most values of the auxiliary information $Z$ there are many parts $Q_{p^*, \key^*}$ that do not receive any points.  
Recall that $Z$ consists of $2(n/\nu-1)$ coefficients and there are $(2^{\nu}-1)^{n/\nu-1}\, 2^{n-\nu}$ equally likely values for $Z$.
 As described above, the view of $\gen, \rep$ is a uniform distribution $V$.  We now show there are many possible values for $Z |P=p^*$.  The only information about $Z$ is contained in the point $V=v$, which can take at most $2^n$ values.  Conditioned on this information there are still many possible values for $Z$.  That is, 
 \begin{align*}
 \forall v, H_0(Z | V=v) &=\log \left(\frac{(2^{\nu}-1)^{n/\nu-1}\, 2^{n-\nu}}{2^n} \right)\\
  &= \log \frac{(2^{\nu}-1)^{n/\nu-1}}{2^{\nu}} \\
  &>\log  \frac{(2^{\nu})^{n/\nu-2}}{2^{\nu}} \ \ \ \ \mbox{(by \lemref{lem:log-minus-one})}\\
  &=\log \frac{2^{n-2\nu}}{2^\nu} = n -3\nu.
 \end{align*}
Consider two distinct possible values $z_1, z_2$ of $Z$.  The supports of the distributions $V| Z=z_1$ and $V | Z=z_2$ intersect in at most one point~(namely $v$).  

This means that the $\gen$ algorithm may include points from different possible $Z$ values in the parts $Q_{p^*, \key^*}$~(other than $v$), and these sets of points are disjoint.  The optimum strategy is to include these values uniformly from different $Z$ values.  Consider the set of all preimages of $R_{sml}$, denoted $Q_{sml} = \cup_{\key\in R_{sml}} \inter(Q_{p^*, \key})$.  Note that $|Q_{sml}| \le 2^{n-4\nu}|R_{sml}|$.  We now show that the intersection between $Q_{sml}$ and $V | Z=z$ is small for most possible values $z$.  As before, each container~(a value of $z$)  receives one item for free~(the point $v$).
\begin{align*}
\expe_z |Q_{sml} \cap (V | P=p^* \wedge Z=z) | &\le \left(\frac{\text{\# items}+\text{\# containers}}{\text{\# containers}}\right)\\
&\le \frac{2^{n-4\nu}|R_{sml}|}{2^{n - 3\nu}}+1\\
&=\frac{|R_{sml}|}{2^{\nu}}+1
\end{align*}
In expectation across $Z$, 
\[\frac{\frac{|R_{sml}|}{2^{\nu}}+1}{|R_{sml}|} \le \frac{1}{2^\nu}+\frac{1}{|R_{sml}|} \] fraction of $R_{sml}$ receives any support.  %Thus, at most $1+1/2^{\nu} = \ngl(n)$ keys in $R_{sml}$ have any support  conditioned on $p^*$ and $Z$~(note this is an expectation across the values of $Z$).  
We now present a distinguisher $D_{p^*}$ for a particular $p^*$:
\begin{enumerate}
\item On input $x, z$.
%\item If $x\not \in R_{sml}$ output random bit $b$.
\item Compute $V|P=p^* \wedge Z=z$ and $Q_{p^*, x}$. 
\item If $(Q_{p^*, x} \cap V|P=p^* \wedge Z=z) =\emptyset$ output $b=0$.
\item Else output $b=1$.
\end{enumerate}

The distinguisher $D(x, p, z)$ is formed by calling $D_p(x, z)$ when $p\in A_p$ and outputting a random bit otherwise.  The advantage of $D$ is 
\begin{align*}
\Pr[D(\Key, P, Z) = 1] &- \Pr[D(U, P, Z) =1]\\
&=(\Pr[D(\Key, P, Z) = 1| P\in A_p] \\&\,\,\,\,- \Pr[D(U, P, Z) =1 | P\in A_p])\Pr[P\in A_p]\\
&\ge \sum_{p^*\in A_p} \Pr[P=p^*] \left(1 - \Pr[D_{p^*}(U, Z)=1]\right)\\
&\ge \sum_{p^*\in A_p} \Pr[P=p^*] (1-\\&\,\,\,\, \Pr[D_{p^*}(U, Z)=1 | U\in R_{sml}]\Pr[U\in R_{sml}] )\\
&\,\,\,\,- \sum_{p^*\in A_p} \Pr[P=p^*]\Pr[U\not\in R_{sml}]\\
&\ge \sum_{p^*\in A_p} \Pr[P=p^*] \left(1- \left(\left(\frac{1}{|R_{sml}|}+\frac{1}{2^\nu}\right)\Pr[U\in R_{sml}]\right) \right)\\
&\,\,\,\,-  \sum_{p^*\in A_p} \left(\Pr[P=p^*]\Pr[U\not\in R_{sml}]\right)\\
&\ge \sum_{p^*\in A_p} \Pr[P=p^*] \left(1- \frac{1}{2^{\nu}} -\frac{1}{2}\Pr[U\in R_{sml}] - \Pr[U\not \in R_{sml}]\right)\\
&\ge \sum_{p^*\in A_p} \Pr[P=p^*] \left(1- \frac{1}{2^{\nu}} -1+\frac{1}{2}\Pr[U\in R_{sml}] \right)\\
&\ge \sum_{p^* \in A_p} \Pr[P=p^*]\left(1/4-\ngl(n)\right) \ge \frac{1}{8}-\ngl(n).
\end{align*}
The sixth line follows since $|R_{sml}| \ge 2^{\kappa-1}\ge 2$.  The eighth line follows because $\Pr[U\in R_{sml}]\ge 1/2$.  The last inequality follows because $\Pr[P\in A_p]\ge 1/2$.
This completes the proof of \lemref{lem:fuzz can't get key}.
\end{proof}


\noindent
\textbf{Note:} As stated in \secref{sec:related settings}, using strong computational assumptions it is possible to avoid this result.  Furthermore, for the specific family used in the secure sketch, we construct computational fuzzy extractors  for this family of distributions when $\mathbb{F}$ is large enough under weaker assumptions in \consref{cons:first construction}.  The construction is stated with imperfect correctness.  A construction with perfect correctness is obtained by using a code that corrects $t$ bidirectional errors instead of a code that corrects $t$ unidirectional errors.

\paragraph{Comparison with \thref{thm:imposs sketch}} The parameters in this result are weaker than those in \thref{thm:imposs sketch}.  This result requires: 1) higher error tolerance $t= \omega(n^{1/2}\log n)$ 2) the fuzzy extractor must have perfect correctness.  The secure sketch counterexample needs $t=4$ and allows the $\rec$ to be wrong almost $1/4$ of the time.

%\appendix 
