%!TEX program = xelatex
%!TEX TS-program = xelatex
%!TEX encoding = UTF-8 Unicode

\documentclass[12pt,t,aspectratio=169,mathserif]{beamer}
%Other possible values are: 1610, 149, 54, 43 and 32. By default, it is to 128mm by 96mm(4:3).
%run XeLaTeX to compile.

\input{wang-slides-preamble.tex}

\begin{document}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\title{Applied Stochastic Processes - Lecture 06}
\subtitle{(6.1 - 6.1) Markov Chains - Continuous Time, Pure Birth Processes}
%(3.1-3.3) 
%\institute{上海立信会计金融学院}
\author{MAP SK}
\date{May 11, 2021}

\maketitle

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%\begin{enumerate}
%\item 第一讲：March 9 (3.1-3.3) Markov Chains - Concepts and Examples
%\item 第二讲：March 16 (3.4-3.5) Markov Chains - First Step Analysis, More Examples
%\item 第三讲：March 23 (4.1-4.4) Markov Chains - Long Run Behaviors
%\item 第四讲：April 13 (5.1-5.2) Poisson Processes - Concept and Examples 
%\item 第五讲：April 20 (5.3-5.4) Poisson Processes - Associated Distributions  
%\item 第六讲：May 11 (6.1-6.1) Markov Chains - Continuous Time, Pure Birth Processes
%\item 第七讲：May 18 (7.1-7.2) Renewal Processes - Renewal Function, Block Replacement
%\item 第八讲：May 25 (8.1-8.2) Brownian Motions -  the Reflection Principle 
%\end{enumerate}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{Contents }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}
\item  Postulates for the Poisson process
\item  System of differential equations
\item  Pure birth process 
\item  The Yule process 
\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.1.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  In this chapter, we present several important examples of {\color{red}continuous time, discrete state}, and Markov processes. 

\item  Specifically, we deal here with a family of random variables $\{X(t); 0 \le t < \infty\}$ where the possible values of $X(t)$ are the nonnegative integers. 

\item  We shall restrict attention to the case where $\{X(t)\}$ is a Markov process with {\color{red}stationary} transition probabilities. 

\item  Thus, the transition probability function for $t > 0$,
\begin{eqnarray*}
P_{ij}(t) = \mathbb{P}\{X(t + u) = j\mid X(u) = i\},\quad i,j = 0,1,2,\cdots,
%\label{eq6-1}
\end{eqnarray*}
is independent of $u \ge 0$.

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.2.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}


\item  It is usually more natural in investigating particular stochastic models based on physical phenomena to prescribe the so-called {\color{red}infinitesimal probabilities} relating to the process and then derive from them an explicit expression for the transition probability function. 

\item  For the case at hand, we will {\color{red}postulate} the form of $P_{ij}(h)$ for $h$ small, and, using the Markov property, we will derive a system of differential equations satisfied by $P_{ij}(t)$ for all $t > 0$. 

\item  The solution of these equations under suitable boundary conditions gives $P_{ij}(t)$.

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.3. Postulates for the Poisson Process }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}


\item  By way of introduction to the general pure birth process, we review briefly the axioms characterizing the Poisson process.

\item  The Poisson process is the {\color{red}prototypical} pure birth process. Let us point out the relevant properties. 
The Poisson process is a Markov process on the nonnegative integers for which
\begin{enumerate}
\item[(i)]  $\mathbb{P} \{ X(t+h)- X(t)=1 \mid X(t)=x \} = \lambda h + o(h) \text{ as } h  \downarrow 0 \,\,\,(x = 0,1,2,\cdots)$.
\item[(ii)]  $\mathbb{P} \{ X(t+h)- X(t)=0 \mid X(t)=x \} =1-\lambda h + o(h) \text{ as } h \downarrow  0$. 
\item[(iii)]  $X(0)=0$.
\end{enumerate}

\item  The precise interpretation of (i) is the relationship 
\begin{eqnarray*}
\lim\limits_{h\to 0+} \frac{ \mathbb{P}\{ X(t+h)-X(t) = 1 \mid X(t)=x \} }{h} = \lambda.
%\label{eq6-1}
\end{eqnarray*}


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.4.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  The $o(h)$ symbol represents a negligible remainder term in the sense that if we divide the term by $h$, then the resulting value tends to zero as $h$ tends to zero. Notice that the right side of (i) is independent of $x$.

\item  These properties are easily verified by direct computation, since the explicit formulas for all the relevant properties are available. 
Problem 6.1.13 calls for showing that these properties, in fact, define the Poisson process.


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.5. Pure Birth Process }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  A natural generalization of the Poisson process is to permit the chance of an event occurring at a given instant of time to depend upon the number of events that have already occurred. 

An example of this phenomenon is the reproduction of living organisms (and hence the name of the process), in which under certain conditions - e.g., sufficient food, no mortality, no migration - the infinitesimal probability of a birth at a given instant is proportional (directly) to the population size at that time. 

This example is known as the {\color{red}Yule process} and will be considered in detail later.

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.6.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}


\item  Consider a sequence of positive numbers, $\{\lambda_k\}$. We define a {\color{red}pure birth process} as a Markov process satisfying the following postulates:
\begin{eqnarray}
\begin{array}{l}
1.\, {\small \mathbb{P} \{ X(t+h)- X(t)=1 \mid X(t)=k \} = \lambda_k h + o_{1,k}(h) \text{ as } h \downarrow  0. } \\
2.\, {\small  \mathbb{P} \{ X(t+h)- X(t)=0 \mid X(t)=k \} =1-\lambda_k h + o_{2,k}(h) \text{ as } h \downarrow 0.}  \\
3.\, {\small  \mathbb{P} \{ X(t+h)- X(t)<0 \mid X(t)=k \} =0 \text{ for all } k\ge 0. } \\
4.\, {\small  X(0)=0. }
\end{array}
\label{eq6-1}
\end{eqnarray}

\item  With this postulate, $X(t)$ does not denote the population size but, rather, the number of births in the time interval $(0,t]$.

\item  Note that the left sides of Postulates (1) and (2) are just $P_{k,k+1}(h)$ and $P_{k,k}(h)$, respectively (owing to stationarity), so that $o_{1,k}(h)$ and $o_{2,k}(h)$ do not depend upon $t$.


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.7. System of Differential Equations }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}


\item  We define $P_n(t) = \mathbb{P}\{X(t) = n\}$, assuming $X(0) = 0$.

\item  By analyzing the possibilities at time $t$ just prior to time $t + h$ ($h$ small), we will derive a {\color{red}system of differential equations} satisfied by $P_n(t)$ for $t \ge 0$, namely
\begin{eqnarray}
\begin{array}{rcl}
P'_0(t) &=& -\lambda_0 P_0(t), \\
P'_n(t) &=& -\lambda_n P_n(t) + \lambda_{n-1}P_{n-1}(t) \text{ for } n \ge 1, 
\end{array}
\label{eq6-2}
\end{eqnarray}
with initial conditions
\begin{eqnarray*}
P_0(0) = 1, \quad P_n(0) = 0,\quad n > 0.
\end{eqnarray*}


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.8.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Indeed, if $h > 0$, $n \ge 1$, then by invoking the law of total probability, the Markov property, and Postulate (3), we obtain
\begin{eqnarray*}
P_n(t + h) 
&=& \sum\limits_{k=0}^{\infty} P_k(t) \mathbb{P}\{ X(t + h) = n \mid X(t) = k \} \\
&=&  \sum\limits_{k=0}^{\infty} P_k(t) \mathbb{P}\{ X(t + h) - X(t) = n - k \mid X(t) = k \} \\
&=&  \sum\limits_{k=0}^{n} P_k(t) \mathbb{P}\{ X(t + h) - X(t) = n - k \mid X(t) = k \},
%\label{eq6-2}
\end{eqnarray*}
where the last sum stops at $k = n$ because, by Postulate (3), the process never decreases.

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.9.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Now for $k = 0,1,\cdots ,n-2$, we have
{\small 
\begin{eqnarray*}
\mathbb{P}\{ X(t + h) - X(t) = n - k \mid X(t) = k\} & \le &  \mathbb{P}\{ X(t + h) - X(t) \ge 2 \mid X(t) = k \} \\
& = & o_{1,k} (h) + o_{2,k} (h) \\
& = & o_{3,n,k}(h). 
%\label{eq6-2}
\end{eqnarray*}
}

%or
%\item  Rewrite it in another way, 
%{\small 
%\begin{eqnarray*}
%\mathbb{P}\{ X(t + h) - X(t) = n - k\mid X(t) = k\} = o_{3,n,k}(h), k=0,1,\cdots,n-2.
%%\label{eq6-2}
%\end{eqnarray*}
%}

\item Thus, by the law of total probability,
{\small 
\begin{eqnarray*}
P_n(t + h) &=& P_n(t) [1 - \lambda_nh + o_{2,n}(h) ] + P_{n-1}(t) [ \lambda_{n-1}h + o_{1,n-1}(h) ] \\ 
&+&  \sum\limits_{k=0}^{n-2} P_k(t)o_{3,n,k} (h),
%\label{eq6-2}
\end{eqnarray*}
}

\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.10.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Rewrite it in another way, 
%or
{\small 
\begin{eqnarray}
&& P_n(t + h) - P_n(t) \nonumber \\
&=& P_n(t) [ -\lambda_nh + o_{2,n}(h) ] + P_{n-1}(t) [ \lambda_{n-1}h + o_{1,n-1}(h) ] + o_n(h),
\label{eq6-3}
\end{eqnarray}
}
where, clearly, $\lim_{h\to 0+} o_n(h)/h = 0$ uniformly in $t \ge 0$, since $o_n(h)$ is bounded by the finite sum $ \sum_{k=0}^{n-2}o_{3,n,k}(h)$, which does not depend on $t$.


\item  Dividing by $h$ and passing to the limit $h  \downarrow 0$, we validate the relations (\ref{eq6-2}), where on the left side we should, to be precise, write the derivative from the right. 

%\end{itemize}
%
%\end{frame}
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%\begin{frame}[fragile=singleslide]{6.1.1. }
%\begin{frame}{6.11.  }
%
%\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%%每页详细内容
%
%\begin{itemize}
%
%\item  With a little more care, however, we can derive the same relation involving the derivative from the left. 
%
%\item  In fact, from (\ref{eq6-3}), we see at once that the $P_n(t)$ are continuous functions of $t$. 
%
%\item  Replacing $t$ by $t - h$ in (\ref{eq6-3}), dividing by $h$, and passing to the limit $h \to 0+$, we find that each $P_n(t)$ has a left derivative that also satisfies equation (\ref{eq6-2}).
%

\item  The first equation of (\ref{eq6-2}) can be solved immediately and yields
\begin{eqnarray}
P_0(t) = \exp\{-\lambda_0t\} \quad \text{ for } t > 0.
\label{eq6-4}
\end{eqnarray}


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.11.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  Define $S_k$ as the time between the $k$th and the $(k+1)$st birth, so that
\begin{eqnarray*}
P_n(t) = \mathbb{P} \left\{ \sum\limits_{i=0}^{n-1} S_i \le t< \sum\limits_{i=0}^{n} S_i \right\}.
%\label{eq6-4}
\end{eqnarray*}

\item  The random variables $S_k$ are called the `sojourn times' between births, and
\begin{eqnarray*}
W_k =  \sum\limits_{i=0}^{k-1} S_i = \text{ the time at which the $k$th birth occurs }.
%\label{eq6-4}
\end{eqnarray*}

\item  We have already seen that $P_0(t) = \exp\{-\lambda_0t\}$. Therefore, 
\begin{eqnarray*}
\mathbb{P}\{S_0 \le t\} = 1 - \mathbb{P} \{X(t) = 0 \} = 1 - \exp\{ -\lambda_0t\};
%\label{eq6-4}
\end{eqnarray*}
that is, $S_0$ has an exponential distribution with parameter $\lambda_0$. 


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
%\begin{frame}{6.13.  }
%
%\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%%每页详细内容
%
%\begin{itemize}
%
%\item  It may be deduced from Postulates (1) through (4) that $S_k\, (k>0)$, also has an exponential distribution with parameter $\lambda_k$ and that the $S_i$'s are mutually independent. 
%
%\item  This description characterizes the pure birth process in terms of its sojourn times, in contrast to the infinitesimal description corresponding to (\ref{eq6-1}).
%
%
%
%\end{itemize}
%
%\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.12. The Yule Process }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  The Yule process arises in physics and biology and describes the growth of a population in which each member has a probability $\beta h+o(h)$ of giving birth to a new member during an interval of time of length $h\, (\beta > 0)$. 

\item  Assuming independence and no interaction among members of the population, the binomial theorem gives
\begin{eqnarray*}
\mathbb{P}\{ X(t+h) - X(t) = 1 \mid X(t) = n\} &=& \binom{n}{1} [ \beta h + o(h) ] [1-\beta h + o(h) ]^{n-1} \\ &=& n\beta h + o_n(h);
%\label{eq6-4}
\end{eqnarray*}
 for the Yule process the infinitesimal parameters are $\lambda_n = n\beta$. 
In words, the total population birth rate is directly proportional to the population size, the proportionality constant being the individual birth rate $\beta$. 


\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.13.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}


\item  As such, the Yule process forms a stochastic analog of the deterministic population growth model represented by the differential equation $dy/dt = \alpha y$. 
In the deterministic model, the rate $dy/dt$ of population growth is directly proportional to population size $y$. 

\item  In the stochastic model, the infinitesimal deterministic increase $dy$ is replaced by the probability of a unit increase during the infinitesimal time interval $dt$. 

\item  Similar connections between deterministic rates and birth (and death) parameters arise frequently in stochastic modeling. 
Examples abound in this chapter.



\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%\begin{frame}[fragile=singleslide]{6.1.1. }
\begin{frame}{6.14.  }

\vspace{-0.4cm}\noindent\makebox[\linewidth]{\rule{\paperwidth}{0.4pt}}
%每页详细内容

\begin{itemize}

\item  The system of equations (\ref{eq6-2}) in the case that $X(0) = 1$ becomes 
\begin{eqnarray*}
P'_n (t) = -\beta [nP_n (t) - (n-1)P_{n-1} (t)], \quad n = 1, 2, \cdots,
%\label{eq6-4}
\end{eqnarray*}
under the initial conditions
%\begin{eqnarray*}
$P_1(0) = 1,\quad P_n(0) = 0, \quad n = 2,3,\cdots.$ 
%\label{eq6-4}
%\end{eqnarray*}

\item  Its solution is
\begin{eqnarray}
P_n(t) = e^{-\beta t}(1- e^{-\beta t})^{n-1},\quad  n\ge 1, 
\label{eq6-10}
\end{eqnarray}
as may be verified directly. We recognize (\ref{eq6-10}) as the geometric distribution with $p=e^{-\beta t}$.


\end{itemize}

\end{frame}


%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\end{document}


%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%









