%!TEX program = xelatex
%!TEX TS-program = xelatex
%!TEX encoding = UTF-8 Unicode

\documentclass[12pt,t,aspectratio=169,mathserif]{beamer}
%Other possible values are: 1610, 149, 54, 43 and 32. By default, it is to 128mm by 96mm(4:3).
%run XeLaTeX to compile.

\input{wang-slides-preamble.tex}

\begin{document}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\title{Applied Stochastic Processes - Lecture 05}
\subtitle{(5.3 - 5.4) Poisson Processes - Associated Distributions}
%(5.3-5.4) 
%\institute{上海立信会计金融学院}
\author{MAP SK}
\date{April 20, 2021}

\maketitle

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%\begin{enumerate}
%\item 第一讲：March 9 (3.1-3.3) Markov Chains - Concepts and Examples
%\item 第二讲：March 16 (3.4-3.5) Markov Chains - First Step Analysis, More Examples
%\item 第三讲：March 23 (4.1-4.4) Markov Chains - Long Run Behaviors
%\item 第四讲：April 13 (5.1-5.2) Poisson Processes - Concept and Examples 
%\item 第五讲：April 20 (5.3-5.4) Poisson Processes - Associated Distributions  
%\item 第六讲：May 11 (6.1-6.1) Markov Chains - Continuous Time, Pure Birth Processes
%\item 第七讲：May 18 (7.1-7.2) Renewal Processes - Renewal Function, Block Replacement
%\item 第八讲：May 25 (8.1-8.2) Brownian Motions -  the Reflection Principle 
%\end{enumerate}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{Contents }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}
\item  Poisson point process
\item  Waiting time and sojourn times
\item  Theorem (waiting time distribution)
\item  Theorem (sojourn time distribution)
\item  Theorem (uniform distribution of occurrence times)
\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.1. Poisson point process }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  A {\color{red}Poisson point process} $N((s,t])$ counts the number of events occurring in an interval $(s,t]$. 

\item  A Poisson counting process, or more simply a Poisson process $X(t)$, counts the number of events occurring up to time $t$. 
Formally, $X(t) = N((0,t])$.

\item  Poisson events occurring in space can best be modeled as a point process. 

\item  For Poisson events occurring on the positive time axis, whether we view them as a Poisson point process or Poisson counting process is largely a matter of convenience, and we will freely do both. 

\item  The two descriptions are equivalent for Poisson events occurring along a line. 

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.2. Waiting time and sojourn times }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  The Poisson process is the more common and traditional description in this case because it allows a pictorial representation as an increasing integer-valued random function taking unit steps.
\item  The Figure shows a typical sample path of a Poisson process where $W_n$ is the time of occurrence of the $n$th event, the so-called {\color{red}waiting time}. 

\item  It is often convenient to set $W_0 = 0$. 

\item  The differences $S_n = W_{n+1} - W_n$ are called {\color{red}sojourn times}; $S_n$ measures the duration that the Poisson process sojourns in state $n$. 

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.3.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  A typical sample path of a Poisson process showing the waiting times $W_n$ and the sojourn times $S_n$.

\begin{figure}
\centering
\includegraphics[height=0.6\textheight, width=0.9\textwidth]{pic/waiting-time-sojourn-time.png}
% \caption{ }
\end{figure}


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.4. Theorem (waiting time distribution) }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  In this section, we will determine a number of probability distributions associated with the Poisson process $X(t)$, the waiting times $W_n$, and the sojourn times $S_n$. 

\item  {\color{red}Theorem}. The waiting time $W_n$ has the gamma distribution whose {\color{red}probability density function} is\begin{eqnarray}f_{W_n}(t) = \frac{\lambda^n t^{n-1}}{(n-1)!} e^{-\lambda t},\quad n=1,2,\cdots, t\ge 0. 
\label{eq5-12}
\end{eqnarray}
In particular, $W_1$, the time to the first event, is exponentially distributed:
\begin{eqnarray}
f_{W_1} (t) = \lambda e^{-\lambda t},\quad t \ge 0.
\label{eq5-13}
\end{eqnarray}


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.5. Proof. }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Proof. The event $W_n \le t$ occurs if and only if there are at least $n$ events in the interval $(0,t]$. 

\item  Since the number of events in $(0,t]$ has a Poisson distribution with mean $\lambda t$ we obtain the {\color{red}cumulative distribution function} of $W_n$ via
\begin{eqnarray*}
F_{W_n}(t) &=& \mathbb{P}\{ W_n \le t\} = \mathbb{P}\{ X(t)\ge n\}  \\ 
&=&  \sum\limits_{k=n}^{\infty} \frac{ (\lambda t)^k e^{-\lambda t}} {k!} = 1-  \sum\limits_{k=0}^{n-1} \frac{ (\lambda t)^k e^{-\lambda t}} {k!}. 
%\label{eq5-13}
\end{eqnarray*}

\item  We obtain the probability density function $f_{W_n}(t)$ by differentiating the cumulative distribution function. 

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.6.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  There is an alternative derivation of the density in (\ref{eq5-12}) that uses the Poisson point process $N((s,t])$ and proceeds directly without differentiation. 

\item  The event $t< W_n \le t + \Delta t$ corresponds exactly to $n-1$ occurrences in $(0, t]$ and one in $(t, t + \Delta t]$, as depicted in the Figure. 

\begin{figure}
\centering
\includegraphics[height=0.4\textheight, width=0.9\textwidth]{pic/waiting-time-distribution-proof.png}
% \caption{ }
\end{figure}

\end{itemize}


\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.7.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Then by the understanding of probability density function, 
\begin{eqnarray*}
f_{W_n} (t) \Delta t  &=&  \mathbb{P} \{t < W_n \le t + \Delta t\} + o(\Delta t) \\&=& \mathbb{P} \{ N((0, t]) = n - 1\} \mathbb{P} \{ N( (t, t + \Delta t] ) = 1\} + o(\Delta t) \\&=& \frac{ (\lambda t)^{n-1} e^{-\lambda t }} {(n-1)!} \lambda (\Delta t) + o(\Delta t).
%\label{eq5-13}
\end{eqnarray*}

\item  Dividing by  $\Delta t$ and passing to the limit as $\Delta t \to 0$ we obtain (\ref{eq5-12}).

\item  Observe that 
\begin{eqnarray*}
\mathbb{P}\{ N((t,t+\Delta t]) \ge 1\} &=& \mathbb{P}\{ N((t,t+\Delta t]) =1 \} + o(\Delta t)  \\
&=& \lambda (\Delta t) + o(\Delta t).
%\label{eq5-13}
\end{eqnarray*}

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.8. Theorem (sojourn time distribution) }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  {\color{red}Theorem}. The sojourn times $S_0$, $S_1$, $\cdots$, $S_{n-1}$ are independent random variables, each having the exponential probability density function
\begin{eqnarray}f_{S_k} (s) = \lambda e^{-\lambda s},\quad s \ge 0. 
\label{eq5-14}
\end{eqnarray}

\item  Proof. We are being asked to show that the joint probability density function of $S_0$, $S_1$, $\cdots$, $S_{n-1}$ is the product of the exponential densities given by 
\begin{eqnarray}
f_{S_0,S_1,\cdots,S_{n-1}} ( s_0,s_1,\cdots,s_{n-1} ) 
=  (\lambda e^{-\lambda s_0}) (\lambda e^{-\lambda s_1}) \cdots (\lambda e^{-\lambda s_{n-1}}).
\label{eq5-15}
\end{eqnarray}

\item  We give the proof only in the case $n = 2$, the general case being entirely similar. 


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.9.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Referring to the Figure we see that the joint occurrence of
\begin{eqnarray}
s_1 < S_1 < s_1 + \Delta s_1 \text{ and } s_2 < S_2 < s_2 + \Delta s_2
\label{eq5-16}
\end{eqnarray}
corresponds to no events in the intervals $(0,s_1]$ and $(s_1 + \Delta s_1, s_1 +\Delta s_1 +s_2]$ and exactly one event in each of the intervals $(s_1,s_1 +\Delta s_1]$ and $(s_1 +\Delta s_1 +s_2, s_1 + \Delta s_1 + s_2 +\Delta s_2]$. 

\begin{figure}
\centering
\includegraphics[height=0.3\textheight, width=0.9\textwidth]{pic/sojourn-time-distribution-proof.png}
% \caption{ }
\end{figure}


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.10. The Uniform Distribution and Poisson Processes }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  The major result of this section, the theorem on the uniform distribution of occurrence times below, provides an important tool for computing certain functionals on a Poisson process. 

\item  It asserts that, conditioned on a fixed total number of events in an interval, the locations of those events are uniformly distributed in a certain way.

\item  After a complete discussion of the theorem and its proof, its application in a wide range of problems will be given.
\item  In order to completely understand the theorem, consider first the following experiment. 

\item  We begin with a line segment $t$ units long and a fixed number $n$ of darts and throw darts at the line segment in such a way that each dart's position upon landing is uniformly distributed along the segment, independent of the location of the other darts. 

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.11.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Let $U_1$ be the position of the first dart thrown, $U_2$ the position of the second, and so on up to $U_n$. 

\item  The probability density function of each $U_k$ is the uniform density
\begin{eqnarray*}
f_U(u) = \left\{ \begin{array}{ll}
1/t, & \text{ for } 0\le u\le t, \\
0, & \text{ elsewhere}.
\end{array}\right.
%\label{eq5-15}
\end{eqnarray*}

\item  Now let $W_1 \le W_2 \le \cdots \le W_n$ denote these same positions, not in the order in which the darts were thrown, but instead in the order in which they appear along the line. 

\begin{figure}
\centering
\includegraphics[height=0.2\textheight, width=0.9\textwidth]{pic/poisson-throws-darts.png}
% \caption{ }
\end{figure}

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.12. Theorem (uniform distribution of occurrence times) }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

%\item  The Figure depicts a typical relation between $U_1,U_2,\cdots,U_n$ and $W_1,W_2,\cdots,W_n$.

\item  The joint probability density function for $W_1,W_2,\cdots,W_n$ is 
\begin{eqnarray}
f_{W_1,\cdots,W_n} (w_1,\cdots,w_n) = n! t^{-n} \quad \text{ for } 0 < w_1 < w_2 < \cdots <w_n \le t.
\label{eq5-17}
\end{eqnarray}

\item  For example, to establish (\ref{eq5-17}) in the case $n = 2$ we have
\begin{eqnarray*}
 && f_{W_1,W_2} (w_1, w_2 )\Delta w_1 \Delta w_2 \\&\approx& \mathbb{P} \{ w_1 < W_1 \le w_1 + \Delta w_1, w_2 < W_2 \le w_2 + \Delta w_2 \} \\ 
&=& 2t^{-2} \Delta w_1 \Delta w_2.
%\label{eq5-17}
\end{eqnarray*}
%Dividing by  w1 w2 and passing to the limit gives (5.17). 
%\item  When $n = 2$, there are two ways that $U_1$ and $U_2$ can be ordered; either $U_1$ is less than $U_2$, or $U_2$ is less than $U_1$. 
%In general, there are $n!$ arrangements of $U_1,\cdots,U_n$ that lead to the same ordered values $W_1 \le \cdots \le W_n$, thus giving (\ref{eq5-17}).

\item  {\color{red}Theorem}. Let $W_1$, $W_2$, $\cdots$ be the occurrence times in a Poisson process of rate $\lambda > 0$. 
Conditioned on $X(t) = n$, the random variables $W_1, W_2,\cdots, W_n$ have the joint probability density function
\begin{eqnarray}
f_{W_1,\cdots,W_n \mid X(t)=n} (w_1,\cdots,w_n) = n! t^{-n} \,\, \text{ for } 0 < w_1 < w_2 < \cdots <w_n \le t.
\label{eq5-18}
\end{eqnarray}


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
%\begin{frame}{5.13. Theorem (uniform distribution of occurrence times) }
%
%\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%%每页详细内容
%
%\begin{itemize}
%
%\item  {\color{red}Theorem}. Let $W_1$, $W_2$, $\cdots$ be the occurrence times in a Poisson process of rate $\lambda > 0$. 
%Conditioned on $N(t) = n$, the random variables $W_1, W_2,\cdots, W_n$ have the joint probability density function
%\begin{eqnarray}
%f_{W_1,\cdots,W_n \mid X(t)=n} (w_1,\cdots,w_n) = n! t^{-n} \,\, \text{ for } 0 < w_1 < w_2 < \cdots <w_n \le t.
%\label{eq5-18}
%\end{eqnarray}
%
%
%\item  Proof. The event $w_i < W_i \le w_i + \Delta w_i$ for $i = 1,\cdots,n$ and $N(t) = n$ corresponds to no events occurring in any of the intervals $(0,w_1]$, $(w_1 +\Delta w_1,w_2]$, $\cdots$, $(w_{n-1} + \Delta w_{n-1},w_n]$, $(w_n + \Delta w_n,t]$, and exactly one event in each of the intervals $(w_1,w_1 + \Delta w_1]$, $(w_2,w_2 + \Delta w_2]$, $\cdots$, $(w_n,w_n + \Delta w_n]$. 
%These intervals are disjoint. 
%
%\end{itemize}
%
%\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.13. Example  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Customers arrive at a facility according to a Poisson process of rate $\lambda$. 

\item  Each customer pays \$1 on arrival, and it is desired to evaluate the expected value of the total sum collected during the interval $(0,t]$ discounted back to time 0. 

\item  This quantity is given by
\begin{eqnarray*}
M = \mathbb{E} \left[ \sum\limits_{k=1}^{X(t)} e^{-\beta W_k} \right] = \frac{\lambda}{\beta}(1-e^{-\beta t}), 
%\label{eq5-18}
\end{eqnarray*}
where $\beta$ is the discount rate, $W_1, W_2, \cdots$ are the arrival times, and $X(t)$ is the total number of arrivals in $(0,t]$. The process is shown in the Figure.


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{5.14.   }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  A dollar received at time $W_k$ is discounted to a present value at time 0 of $$\exp \{-\beta W_k \}.$$

\begin{figure}
\centering
\includegraphics[height=0.5\textheight, width=0.9\textwidth]{pic/poisson-dollar-discounted-value.png}
% \caption{ }
\end{figure}


\end{itemize}

\end{frame}


%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\end{document}


%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%









