%!TEX program = xelatex
%!TEX TS-program = xelatex
%!TEX encoding = UTF-8 Unicode

\documentclass[12pt,t,aspectratio=169,mathserif]{beamer}
%Other possible values are: 1610, 149, 54, 43 and 32. By default, it is set to 128mm by 96mm (4:3).
%run XeLaTeX to compile.

\input{wang-slides-preamble.tex}

\begin{document}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\title{Applied Stochastic Processes --- Lecture 04}
\subtitle{(5.1--5.2) Poisson Processes --- Concept and Examples}
%(5.1-5.2) 
%\institute{上海立信会计金融学院}
\author{MAP SK}
\date{April 13, 2021}

\maketitle

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%\begin{enumerate}
%\item 第一讲：March 9 (3.1-3.3) Markov Chains - Concepts and Examples
%\item 第二讲：March 16 (3.4-3.5) Markov Chains - First Step Analysis, More Examples
%\item 第三讲：March 23 (4.1-4.4) Markov Chains - Long Run Behaviors
%\item 第四讲：April 13 (5.1-5.2) Poisson Processes - Concept and Examples 
%\item 第五讲：April 20 (5.3-5.4) Poisson Processes - Associated Distributions  
%\item 第六讲：May 11 (6.1-6.1) Markov Chains - Continuous Time, Pure Birth Processes
%\item 第七讲：May 18 (7.1-7.2) Renewal Processes - Renewal Function, Block Replacement
%\item 第八讲：May 25 (8.1-8.2) Brownian Motions -  the Reflection Principle 
%\end{enumerate}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{Contents }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}
\item  The Poisson distribution
\item  Two theorems in Poisson distribution
\item  The Poisson process
\item  Non-homogeneous process
\item  The law of rare events
\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.1. The Poisson Distribution }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Poisson behavior is so {\color{red}pervasive} in natural phenomena and the Poisson distribution is so {\color{red}amenable} to extensive and elaborate analysis as to make the Poisson process a cornerstone of stochastic modeling.

\item  The Poisson distribution with parameter $\mu > 0$ is given by 
\begin{eqnarray}
p_k = \frac{e^{-\mu}\mu^k}{k!} \text{ for } k = 0,1,\cdots . 
\label{eq5-1}
\end{eqnarray} 

\item  Let $X$ be a random variable having the Poisson distribution in \eqref{eq5-1}. 

\item  We evaluate the mean, or first moment, via
\begin{eqnarray*}
\mathbb{E} [X] = \sum\limits_{k=0}^{\infty} kp_k =  \sum\limits_{k=1}^{\infty} \frac{ke^{-\mu}\mu^k}{k!} 
= \mu \sum\limits_{k=1}^{\infty} \frac{e^{-\mu}\mu^{k-1}}{(k-1)!} = \mu. 
%\label{eq5-1}
\end{eqnarray*} 


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.2. The Poisson Distribution (continued) }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  To evaluate the variance, it is easier first to determine
\begin{eqnarray*}
\mathbb{E} [X(X - 1)] = \sum\limits_{k=2}^{\infty}  k(k - 1)p_k = \mu^2 e^{-\mu} \sum\limits_{k=2}^{\infty}  \frac{\mu^{k-2}}{(k-2)!} = \mu^2. 
%\label{eq5-1}
\end{eqnarray*} 

Then $\mathbb{E}[X^2]  = \mathbb{E} [ X(X - 1)] + \mathbb{E} [X] = \mu^2 + \mu$, while 
\begin{eqnarray*}
\mathbb{V}ar [X] = \mathbb{E}[X^2] - \mathbb{E}[X]^2 = \mu^2+\mu - \mu^2 = \mu. 
%\label{eq5-1}
\end{eqnarray*} 

\item  Thus, the Poisson distribution has the unusual characteristic that both the mean and the variance are given by the same value $\mu$.


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.3. Two Theorems in Poisson distribution }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Two fundamental properties of the Poisson distribution, which will arise later in a variety of forms, concern the sum of independent Poisson random variables and certain random decompositions of Poisson phenomena. 

\item  Theorem 5.1. Let $X$ and $Y$ be independent random variables having Poisson distributions with parameters $\mu$ and $\nu$, respectively. Then the sum $X+Y$ has a Poisson distribution with parameter $\mu +\nu$. 

\item  Theorem 5.2. Let $N$ be a Poisson random variable with parameter $\mu$, and conditional on $N$, let $M$ have a binomial distribution with parameters $N$ and $p$. Then the unconditional distribution of $M$ is Poisson with parameter $\mu p$.

\item  The Poisson process entails notions of both independence and the Poisson distribution.

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.4. The Poisson Process }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Definition. A {\color{red}Poisson process} of intensity, or rate, $\lambda > 0$ is an integer-valued stochastic process $\{X(t); t \ge 0\}$ for which
\begin{enumerate}

\item  for any time points $0= t_0 < t_1 < t_2 < \cdots < t_n$, the process increments $X(t_1)-X(t_0)$, $X(t_2)-X(t_1)$, $\cdots$, $X(t_n)-X(t_{n-1})$ are independent random variables;

\item  for $s\ge 0$ and $t>0$, the random variable $X(s+t)-X(s)$ has the Poisson distribution
\begin{eqnarray*}
\mathbb{P}\{X(s + t) - X(s) = k\} = \frac{ (\lambda t)^k e^{-\lambda t} }{k!} \text{ for } k = 0,1, \cdots;
%\label{eq5-1}
\end{eqnarray*} 

\item  $X(0)=0$.

\end{enumerate}

\item  In particular, observe that if $X(t)$ is a Poisson process of rate $\lambda > 0$, then the moments are
$\mathbb{E} [X(t)] = \lambda t$ and $\mathbb{V}ar [X(t)] = \lambda t$.

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.5. Example 1}

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item   Defects occur along an undersea cable according to a Poisson process of rate $\lambda = 0.1$ per mile. 

\begin{enumerate}
\item[(a)]  What is the probability that no defects appear in the first two miles of cable? 
\item[(b)]  Given that there are no defects in the first two miles of cable, what is the conditional probability of no defects between mile points two and three? 
\end{enumerate}

\item  To answer (a) we observe that $X(2)$ has a Poisson distribution whose parameter is $(0.1)(2) = 0.2$. Thus, $\mathbb{P}\{X(2) = 0\} = e^{-0.2} = 0.8187.$ 

\item  In part (b), we use the independence of $X(3) - X(2)$ and $X(2) - X(0) = X(2)$. Thus, the conditional probability is the same as the unconditional probability, and
$$\mathbb{P}\{X(3) - X(2) = 0\} = \mathbb{P}\{X(1) = 0\} = e^{-0.1} = 0.9048.$$

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.6. Example 2 }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Customers arrive in a certain store according to a Poisson process of rate $\lambda = 4$ per hour. Given that the store opens at 9:00 a.m., what is the probability that exactly one customer has arrived by 9:30 and a total of five have arrived by 11:30 a.m.?

\item  Measuring time $t$ in hours from 9:00 a.m., we are asked to determine $\mathbb{P}\{X(1/2)=1, X(5/2) =5\}$. 
We use the independence of $X(5/2)-X(1/2)$ and $X(1/2)$  to reformulate the question. Thus
\begin{eqnarray*}
&& \mathbb{P}\{X(1/2)=1, X(5/2) =5\} \\
&=& \mathbb{P}\{X(1/2)=1, X(5/2)-X(1/2) =4\} \\
&=& \mathbb{P}\{X(1/2)=1\} \mathbb{P}\{X(5/2)-X(1/2) =4\} \\
&=& 1024e^{-10}/3. 
%\label{eq5-1}
\end{eqnarray*} 

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.7. Non-homogeneous Processes }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  The rate $\lambda$ in a Poisson process $X(t)$ is the {\color{red}proportionality} constant in the probability of an event occurring during an arbitrarily small interval. 

\item  To explain this more precisely,
\begin{eqnarray*}
\mathbb{P}\{X(t + h) - X(t) = 1\} &=& (\lambda h)e^{-\lambda h} \\
&=& (\lambda h) \left( 1- \lambda h + \lambda^2 h^2 - \cdots \right) \\
&=& \lambda h + o(h),
%\label{eq5-1}
\end{eqnarray*} 
where $o(h)$ denotes a general and unspecified remainder term of smaller order than $h$.

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.8. Non-homogeneous Processes (continued) }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  It is {\color{red}pertinent} in many applications to consider rates $\lambda = \lambda(t)$ that vary with time. 

\item  Such a process is termed a non-homogeneous or non-stationary Poisson process to distinguish it from the stationary, or homogeneous, process that we primarily consider. 

\item  If $X(t)$ is a non-homogeneous Poisson process with rate $\lambda(t)$, then an increment $X(t) - X(s)$, giving the number of events in an interval $(s, t]$, has a Poisson distribution with parameter \[\int_s^t \lambda (u)\,du,\] and increments over disjoint intervals are independent random variables.

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.9. Example }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Demands on a first aid facility in a certain location occur according to a non-homogeneous Poisson process having the rate function
\begin{eqnarray*} 
\lambda(t) = \left\{ \begin{array}{ll}
2t, & \text{ for } 0 \le t < 1, \\
2, & \text{ for } 1 \le t< 2, \\
4-t, & \text{ for } 2 \le t \le 4, \\
\end{array} \right.
%\label{eq5-1}
\end{eqnarray*} 
where $t$ is measured in hours from the opening time of the facility. 

\item  What is the probability that two demands occur in the first two hours of operation and two in the second two hours? 


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.10. Example (continued) }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Since demands during disjoint intervals are independent random variables, we can answer the two questions separately. 

\item  The mean for the first two hours is $\mu = \int_0^1 2t\,dt + \int_1^2 2\,dt = 3$, and thus
$$\mathbb{P}\{ X(2)=2\} = \frac{e^{-3}(3)^2}{2!} = 0.2240. $$

\item  For the second two hours, $\mu=\int_2^4 (4-t)\,dt = 2$, and 
$$\mathbb{P}\{ X(4)-X(2)=2\} = \frac{e^{-2}(2)^2}{2!} = 0.2707. $$


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.11. Time-Scale Transformation }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Let $X(t)$ be a non-homogeneous Poisson process of rate $\lambda(t) > 0$ and define $$\Lambda (t) = \int_0^t \lambda (u)du.$$

\item  Make a deterministic change {\color{red}in the time scale} and define a new process $Y(s) = X(t)$, where $s =  \Lambda(t)$. 
Observe that  $\Delta s = \lambda (t) \Delta t + o(\Delta t)$. Then
\begin{eqnarray*} 
\mathbb{P}\{ Y(s + \Delta s) - Y(s) = 1\} &=& \mathbb{P}\{ X(t + \Delta t) - X(t) = 1\} \\
&=& \lambda (t)\Delta t + o(\Delta t) = \Delta s + o(\Delta s), 
%\label{eq5-1}
\end{eqnarray*} 
so that $Y(s)$ is a homogeneous Poisson process of unit rate. 

\item  By this means, questions about non-homogeneous Poisson processes can be transformed into corresponding questions about homogeneous processes. 

%For this reason, we concentrate our exposition on the latter.


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.12. The Law of Rare Events }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  The common occurrence of the Poisson distribution in nature is explained by {\color{red}the law of rare events}. 

\item  Informally, this law asserts that where a certain event may occur in any of a large number of possibilities, but where the probability that the event does occur in any given possibility is small, then the total number of events that do happen should follow, approximately, the Poisson distribution. 

\item  A more formal statement in a particular instance follows. 

\item  Consider a large number $N$ of independent Bernoulli trials where the probability $p$ of success on each trial is small and constant from trial to trial. 


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.13. The Law of Rare Events (continued) }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Let $X_{N,p}$ denote the total number of successes in the $N$ trials, where $X_{N,p}$ follows the binomial distribution
\begin{eqnarray} 
\mathbb{P} \{ X_{N,p} = k \}  = \frac{N!}{ k! (N - k)!} p^k(1-p)^{N-k} \text{ for } k = 0,\cdots, N. 
\label{eq5-4}
\end{eqnarray} 

\item  Now let us consider the limiting case in which $N\to\infty$ and $p\to 0$ in such a way that $Np = \mu > 0$ where $\mu$ is constant. 

\item  It is a familiar fact that the distribution for $X_{N,p}$ becomes, in the limit, the Poisson distribution
\begin{eqnarray} 
\mathbb{P} \{ X_{\mu} = k \}  = \frac{e^{-\mu}\mu^k}{k!} \text{ for } k = 0,1,\cdots . 
\label{eq5-5}
\end{eqnarray} 

\item  This form of the law of rare events is stated as a limit. 

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{4.14. The Law of Rare Events (continued) }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  In stochastic modeling, the law is used to suggest circumstances under which one might expect the Poisson distribution to prevail, at least approximately. 

\item  For example, a large number of cars may pass through a given stretch of highway on any particular day. 

\item  The probability that any specified car is in an accident is, we hope, small. 

\item  Therefore, one might expect that the actual number of accidents on a given day along that stretch of highway would be, at least approximately, Poisson distributed.




\end{itemize}

\end{frame}


%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\end{document}


%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%









