
%!TEX program = xelatex
%!TEX TS-program = xelatex
%!TEX encoding = UTF-8 Unicode

\documentclass[10pt]{article} 

\input{wang_preamble.tex}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usepackage{titling}
\setlength{\droptitle}{-2cm}   % This is your set screw

%% Title, author, and date of the document
\author{王立庆 (Mathematics and Applied Mathematics, Class 1, Cohort of 2019)}
\title{Applied Stochastic Processes (Taught in English): Lecture Plans}
%\date{\vspace{-3ex}}
\renewcommand{\today}{\number\year-\number\month-\number\day}
%\date{2020 年 2 月 28 日}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{document}

\maketitle

\section*{Contents}

\begin{enumerate}
\item Lecture 1: (3.1-3.3) Markov Chains - Concepts and Examples
\item Lecture 2: (3.4-3.5) Markov Chains - First Step Analysis, More Examples
\item Lecture 3: (4.1-4.4) Markov Chains - Long Run Behavior
\item Lecture 4: (5.1-5.2) Poisson Processes - Concepts and Examples 
\item Lecture 5: (5.3-5.4) Poisson Processes - Associated Distributions  
\item Lecture 6: (6.1) Markov Chains - Continuous Time, Pure Birth Processes
\item Lecture 7: (7.1-7.2) Renewal Processes - Renewal Function, Block Replacement
\item Lecture 8: (8.1-8.2) Brownian Motion - the Reflection Principle 
\end{enumerate}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\newpage
\section{Applied Stochastic Processes - Lecture 01}

%\subsection{内容提要}%Markov Chains - Concepts and Examples
\subsection{Summary}
\begin{itemize}
\item  Stochastic processes
\item  Discrete-time Markov chain
\item  Transition probability
\item  Transition probability matrix
\item  Chapman-Kolmogorov equations
\item  An inventory model
\end{itemize}

%\subsection{例题讲解}
%\subsection{Contents}
\subsection{Examples}
\begin{enumerate}
%\item The concept of a Markov process.
%\item The concept of a Markov chain.
%\item Transition probability matrices, initial distribution, Exercise 3.1.1.
%\item Two pictures of the Markov chain in Exercise 3.1.1.
%\item An illustration to the proof of CK equation.
%\item $n$-step transition probabilities, Problem 3.2.3.
%\item The Inventory Model as a Markov chain.

\item  A Markov chain $\{X_0,X_1,\cdots\}$ on states $0,1,2$ has the transition probability matrix $P$ 
and initial distribution $(p_0,p_1,p_2)$. Determine $P[X_0=0,X_1=1,X_2=2]$.
\begin{eqnarray*}
P=\begin{bmatrix} 0.1 & 0.2 & 0.7 \\ 0.9 & 0.1 & 0 \\ 0.1 & 0.8 & 0.1 \end{bmatrix},\hspace{0.3cm}
\left\{\begin{array}{l}
p_0=P[X_0=0]=0.3, \\ 
p_1=P[X_0=1]=0.4, \\
p_2=P[X_0=2]=0.3. 
\end{array}
\right.
\end{eqnarray*}
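A brief solution sketch (multiplying the initial probability by the successive one-step transition probabilities):
\[ P[X_0=0,X_1=1,X_2=2] = p_0\, P_{01}\, P_{12} = 0.3 \times 0.2 \times 0 = 0. \]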

\item Let $X_n$ denote the quality of the $n$th item produced by a production system with $X_n=0$ meaning ``good''  and $X_n=1$ meaning ``defective''. Suppose that $X_n$ evolves as a Markov chain with the following transition probability matrix. 
What is the probability that the fourth item is defective given that the first item is defective?
\begin{eqnarray*}
P=\begin{bmatrix} 0.99 & 0.01 \\ 0.12 & 0.88  \end{bmatrix}.
\end{eqnarray*}
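A brief solution sketch: by time homogeneity the answer is the three-step transition probability $P^{(3)}_{11}$. Squaring $P$ and multiplying once more by $P$,
\[ P^2 = \begin{bmatrix} 0.9813 & 0.0187 \\ 0.2244 & 0.7756 \end{bmatrix}, \qquad
P^{(3)}_{11} = 0.2244\times 0.01 + 0.7756\times 0.88 \approx 0.6848. \]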

\item 
A commodity is stocked in order to satisfy a continuing demand. The total demand during period $n$ is a random variable $\xi_n$, and the demands $\xi_1,\xi_2,\cdots$ are independent with a common distribution.
The stock level is examined at the end of each period: if the quantity on hand is not greater than $s$, it is immediately replenished up to the level $S$ $(S>s)$; otherwise no replenishment takes place.
Let $X_n$ denote the quantity on hand at the end of period $n$, just prior to restocking.
\begin{enumerate}
\item What are the states of this process?
\item Express the relation $X_{n+1}=f(X_n,\xi_{n+1})$ explicitly.
\item Explain why $\{X_n,\, n=0,1,2,\cdots\}$ is a Markov chain.
\end{enumerate}
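A possible solution sketch (assuming the demands $\xi_n$ are nonnegative integer-valued, so that a negative stock level records backlogged demand): the states are $S, S-1, \cdots, 1, 0, -1, -2, \cdots$, and
\[ X_{n+1} = \begin{cases} X_n - \xi_{n+1}, & \text{if } s < X_n \le S, \\ S - \xi_{n+1}, & \text{if } X_n \le s. \end{cases} \]
Since $X_{n+1}$ is a function of $X_n$ and $\xi_{n+1}$ only, and $\xi_{n+1}$ is independent of $X_0,\cdots,X_n$, the process $\{X_n\}$ is a Markov chain.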

\end{enumerate}

%\subsection{课后习题}
\subsection{Homework 01}
E3.1.2, P3.1.4, E3.2.2, P3.2.4, E3.3.2, P3.3.6.
	
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\newpage
\section{Applied Stochastic Processes - Lecture 02}

%\subsection{内容提要}%Markov Chains - First Step Analysis
\subsection{Summary}
%\subsection{Methods}
\begin{itemize}
\item  First step analysis
\item  Two-state Markov chain
\item  One-dimensional random walks
\end{itemize}


%\subsection{例题讲解}
%\subsection{Contents}
\subsection{Examples}
\begin{enumerate}
\item Consider the Markov chain with the following transition probability matrix $P$, where $\alpha > 0$, $\beta > 0$, $\gamma > 0$, and $\alpha+\beta+\gamma = 1$. 
Starting from state 1, with what probability is the process ultimately trapped in state 0 rather than in state 2, and how long, on average, does it take to reach one of these absorbing states? 
\[ P=\begin{bmatrix}1 & 0 & 0 \\ \alpha & \beta & \gamma \\ 0 & 0 & 1 \end{bmatrix}. \]
%{\small Explain: If the Markov chain begins in state 1, it remains there for a random duration and then proceeds either to state 0 or to state 2, where it is trapped or absorbed. That is, once in state 0 the process remains there for ever after, as it also does in state 2. }
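A brief first step analysis, writing $u$ for the probability of absorption in state 0 starting from state 1 and $v$ for the mean time to absorption:
\[ u = \alpha + \beta u \;\Rightarrow\; u = \frac{\alpha}{\alpha+\gamma}, \qquad
v = 1 + \beta v \;\Rightarrow\; v = \frac{1}{\alpha+\gamma} = \frac{1}{1-\beta}, \]
and the probability of absorption in state 2 is $\gamma/(\alpha+\gamma)$.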

\item A white rat is put into the maze shown and moves through it at random. 
Assume that at each unit of time the rat moves to one of the adjacent compartments, chosen at random, and let $X_n$ denote the compartment occupied at stage $n$. What is the probability that the rat encounters the food before being shocked? 

\item Let the state space be $\{0,1\}$. Consider the following transition probability matrix. 
\[ P=\begin{bmatrix} 1-a & a  \\ b & 1-b \end{bmatrix}, \,  (0<a,b<1).\]
\begin{enumerate}
\item Compute the $n$-step transition matrix $P^{(n)}$, and its limit as $n\to\infty$.
\item Show that, in the long run, the system is in state 0 with probability $b/(a + b)$ and in state 1 with probability $a/(a + b)$, irrespective of the initial state.
\end{enumerate}
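A standard computation (which can be verified by induction on $n$) gives
\[ P^{(n)} = \frac{1}{a+b}\begin{bmatrix} b & a \\ b & a \end{bmatrix}
+ \frac{(1-a-b)^n}{a+b}\begin{bmatrix} a & -a \\ -b & b \end{bmatrix}. \]
Since $0<a,b<1$ implies $|1-a-b|<1$, the second term vanishes as $n\to\infty$, which yields the limiting probabilities $b/(a+b)$ and $a/(a+b)$.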

\item Let $\xi$ denote a discrete random variable whose possible values are the nonnegative integers, with $P(\xi=i) = a_i\ge 0$ for $i=0,1,\cdots$, and $\sum_i a_i = 1$. 
Let $\xi_1,\xi_2,\cdots$ represent independent observations of $\xi$.
Show that the following are Markov chains, and find the transition probability matrix in each case.
\begin{enumerate}
\item The sequence $\{\xi_1,\xi_2,\cdots\}$ itself. %{\color{blue}Find its transition probability matrix.}
\item The partial maxima $X_n=\max\{\xi_1,\cdots,\xi_n\}$, for $n=1,2,\cdots$.
%Suppose $\xi_1, \xi_2, \cdots$ represent successive bids on a certain asset that is offered for sale. Then $X_n$ is the maximum that is bid up to stage $n$. {\color{blue}Find its transition probability matrix.}
%Suppose that the bid that is accepted is the first bid that equals or exceeds a prescribed level $M$. 
%The time of sale is the random variable $T$. {\color{blue}Find its expectation $E(T)$.}
%$T = \min\{n\ge 1 \mid X_n\ge M\}$.
\item The successive partial sums $ X_n=\xi_1+\cdots+\xi_n,\, n\ge 1; \,\, X_0=0$. 
\end{enumerate}
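A possible form of the three transition matrices, writing $P(i,j)=P[X_{n+1}=j\mid X_n=i]$:
\begin{eqnarray*}
\text{(a)} && P(i,j)=a_j \text{ for all } i,j; \\
\text{(b)} && P(i,i)=a_0+a_1+\cdots+a_i,\quad P(i,j)=a_j \text{ for } j>i,\quad P(i,j)=0 \text{ for } j<i; \\
\text{(c)} && P(i,j)=a_{j-i} \text{ for } j\ge i,\quad P(i,j)=0 \text{ for } j<i.
\end{eqnarray*}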

\end{enumerate}

%\subsection{课后习题}
\subsection{Homework 02}
E3.4.1, E3.4.2, P3.4.1, P3.4.5, E3.5.1, P3.5.4. 

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\newpage
\section{Applied Stochastic Processes - Lecture 03}

%\subsection{内容提要}%Markov Chains - Concepts and Examples
\subsection{Summary}
\begin{itemize}
\item  Regular Markov chains
\item  Limiting theorem of regular Markov chains
\item  Doubly stochastic matrices
\item  Two interpretations of the limiting distribution
\item  Optimal replacement rules
\end{itemize}

%\subsection{例题讲解}
\subsection{Examples}
\begin{enumerate}

\item Consider a two-state Markov chain with the following transition probability matrix.
\[ P=\begin{bmatrix} 1/3&2/3 \\ 3/4& 1/4 \end{bmatrix}. \] 
\begin{enumerate}
\item Explain that this is a regular Markov chain.
\item Find the limiting distribution and stationary distribution.
\end{enumerate}
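A brief solution sketch, writing $\pi$ for the stationary distribution: all entries of $P$ are positive, so the chain is regular. Solving $\pi P=\pi$ with $\pi_0+\pi_1=1$,
\[ \pi_0 = \tfrac{1}{3}\pi_0 + \tfrac{3}{4}\pi_1,\quad \pi_0+\pi_1=1
\;\Rightarrow\; \pi = \left( \tfrac{9}{17},\, \tfrac{8}{17} \right), \]
and for a regular chain this stationary distribution is also the limiting distribution.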

\item Consider a Markov chain with the following transition probability matrix.
\[ P=\begin{bmatrix} 0.5&0.5&0&0&0 \\ 0.25&0.75&0&0&0 \\ 0&0&0&1&0 \\ 0&0&0.5&0&0.5 \\ 0&0&0&1&0 \end{bmatrix}. \] 
\begin{enumerate}
\item Explain the equivalence relation of communication.
\item Divide the state space into equivalence classes according to communication.
\end{enumerate}
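A brief answer sketch: states 0 and 1 reach each other directly, states 2, 3, 4 all communicate through state 3, and neither group can reach the other (the corresponding off-block entries of $P$ are all zero), so the classes are
\[ C_1=\{0,1\},\qquad C_2=\{2,3,4\}. \]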

\item Consider a Markov chain with the following transition probability matrix. Compute the period of each state.
\[ P=\begin{bmatrix} 0&1&0&0 \\ 0&0&1&0 \\ 0&0&0&1 \\ 0.5&0&0.5&0   \end{bmatrix}. \] 
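A brief answer sketch: the chain is irreducible, and every return path to state 0 has the form $0\to 1\to 2\to 3$, followed by some number of cycles $3\to 2\to 3$ and then $3\to 0$; the possible return times are therefore $4,6,8,\cdots$, so $d(0)=\gcd\{4,6,8,\cdots\}=2$, and since all states communicate, every state has period 2.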

\item Consider the Markov chain on $\{0,1\}$ with the following transition probability matrix.
\[ P=\begin{bmatrix} 1-a&a \\ b& 1-b \end{bmatrix}. \] 
\begin{enumerate}
\item Compute the first return distribution to state 0.
\item Compute the mean return time to state 0. 
\item Find the stationary distribution.
\end{enumerate}
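A brief solution sketch, writing $f^{(n)}_{00}$ for the probability that the first return to state 0 occurs at step $n$ and $m_0$ for the mean return time:
\[ f^{(1)}_{00}=1-a,\qquad f^{(n)}_{00}=a(1-b)^{n-2}b,\;\; n\ge 2, \qquad
m_0=\sum_{n\ge 1} n f^{(n)}_{00} = 1+\frac{a}{b} = \frac{a+b}{b}, \]
and the stationary distribution is $\pi=\bigl(\tfrac{b}{a+b},\tfrac{a}{a+b}\bigr)$, consistent with $\pi_0=1/m_0$.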

\end{enumerate}

%\subsection{课后习题}
\subsection{Homework 03}
E4.1.10, P4.1.1, P4.1.5, E4.2.6, E4.3.2, E4.4.2.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\newpage
\section{Applied Stochastic Processes - Lecture 04}

%\subsection{内容提要}%Markov Chains - Concepts and Examples
\subsection{Summary}
\begin{itemize}
\item  The Poisson distribution
\item  Two theorems on the Poisson distribution
\item  The Poisson process
\item  Non-homogeneous Poisson processes
\item  The law of rare events
\end{itemize}

%\subsection{例题讲解}
%\subsection{Contents}
\subsection{Examples}
\begin{enumerate}
\item Let $X$ and $Y$ be independent random variables having Poisson distributions with parameters $\mu$ and $\nu$, respectively. Show that the sum $X + Y$ has a Poisson distribution with parameter $\mu + \nu$.
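A brief solution sketch, by the convolution formula and the binomial theorem:
\[ P[X+Y=n]=\sum_{k=0}^{n} \frac{\mu^k e^{-\mu}}{k!}\cdot\frac{\nu^{n-k}e^{-\nu}}{(n-k)!}
 = \frac{e^{-(\mu+\nu)}}{n!}\sum_{k=0}^{n}\binom{n}{k}\mu^k\nu^{n-k}
 = \frac{(\mu+\nu)^n e^{-(\mu+\nu)}}{n!}. \]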

\item Customers arrive in a certain store according to a Poisson process of rate $\lambda = 4$ per hour. 
Given that the store opens at 9:00 A.M., what is the probability that exactly one customer has arrived by 9:30 and a total of five have arrived by 11:30 A.M.?
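A brief solution sketch, using independent increments (the two intervals have lengths $0.5$ and $2$ hours, hence means $2$ and $8$):
\[ P[X(0.5)=1,\,X(2.5)=5] = P[X(0.5)=1]\,P[X(2.5)-X(0.5)=4]
= 2e^{-2}\cdot\frac{8^4 e^{-8}}{4!} \approx 0.0155. \]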

\item Demands on a first aid facility in a location occur according to a non-homogeneous Poisson process having the rate function
\begin{eqnarray*}
\lambda(t) = \left\{\begin{array}{ll}
2t,& \textrm{ for } 0\le t< 1\\
2,& \textrm{ for } 1\le t< 2\\
4-t,& \textrm{ for } 2\le t\le 4
\end{array}\right.
\end{eqnarray*}
where $t$ is measured in hours from the opening time of the facility. 
What is the probability that two demands occur in the first two hours of operation and two in the second two hours? 
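A brief solution sketch: the mean numbers of demands in $(0,2]$ and $(2,4]$ are
\[ \int_0^1 2t\,dt + \int_1^2 2\,dt = 3, \qquad \int_2^4 (4-t)\,dt = 2, \]
so by independence of the two increments the probability is $\dfrac{3^2e^{-3}}{2!}\cdot\dfrac{2^2e^{-2}}{2!} = 9e^{-5}\approx 0.061$.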

\item Let $\xi_1,\xi_2,\cdots,\xi_n$ be independent Bernoulli random variables, where 
\[ P[\xi_k=1]=p_k,\,\, P[\xi_k=0]=1-p_k,\,\, 1\le k\le n.\]
Let $S_n=\xi_1+\xi_2+\cdots+\xi_n$ and $\mu=p_1+p_2+\cdots+p_n$.
\begin{enumerate}
\item Find the exact distribution for $S_n$.
\item Prove that the difference between $\mathbb{P}[S_n=k]$ and $\frac{\mu^ke^{-\mu}}{k!}$ is at most $p_1^2+\cdots+p_n^2$ in absolute value.
\end{enumerate}
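A partial sketch for (a): since the $\xi_k$ need not share a common success probability, $S_n$ is not binomial in general;
\[ \mathbb{P}[S_n=k] = \sum_{\substack{A\subseteq\{1,\cdots,n\} \\ |A|=k}}\;\prod_{i\in A}p_i\prod_{j\notin A}(1-p_j). \]
Part (b) is the law of rare events bound, which can be proved by coupling each $\xi_k$ with a Poisson random variable of mean $p_k$.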

\end{enumerate}

%\subsection{课后习题}
\subsection{Homework 04}
E5.1.1, E5.1.7, P5.1.9, E5.2.1, E5.2.3, P5.2.1.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\newpage
\section{Applied Stochastic Processes - Lecture 05}

%\subsection{内容提要}%Markov Chains - Concepts and Examples
\subsection{Summary}
\begin{itemize}
\item  Poisson point process
\item  Waiting time and sojourn times
\item  Theorem (waiting time distribution)
\item  Theorem (sojourn time distribution)
\item  Theorem (uniform distribution of occurrence times)
\end{itemize}

%\subsection{例题讲解}
%\subsection{Contents}
\subsection{Examples}
\begin{enumerate}
\item In a Poisson process $\{X(t)\}$ of rate $\lambda$, the waiting time $W_n$ is the time of occurrence of the $n$th event; it is often convenient to set $W_0 = 0$.
Show that the waiting time $W_n$ has the gamma distribution $\Gamma(n,\lambda)$.
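A brief solution sketch: $\{W_n>t\}=\{X(t)\le n-1\}$, so
\[ P[W_n>t]=\sum_{k=0}^{n-1}\frac{(\lambda t)^k e^{-\lambda t}}{k!}, \]
and differentiating (the sum telescopes) gives the $\Gamma(n,\lambda)$ density $f_{W_n}(t)=\dfrac{\lambda^n t^{n-1}e^{-\lambda t}}{(n-1)!}$, $t>0$.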

\item The sojourn time $S_n = W_{n+1} - W_n$ measures the duration that the Poisson process sojourns in state $n$.
Show that each sojourn time has the exponential distribution $Exp(\lambda)=\Gamma(1,\lambda)$, and that $S_0,S_1,\cdots,S_{n-1}$ are independent.

\item Let $\{X(t),t\ge 0\}$ be a Poisson process of rate $\lambda>0$. Show that for $0<u<t$, 
the conditional distribution of $(X(u)\mid X(t)=n)$ is binomial: 
\[ \mathbb{P}[X(u)=k\mid X(t)=n] = \binom{n}{k}\left(\frac{u}{t}\right)^k \left(1-\frac{u}{t}\right)^{n-k},\,\, 0\le k\le n. \]
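A brief solution sketch, using independent increments:
\[ \mathbb{P}[X(u)=k\mid X(t)=n]
=\frac{\dfrac{(\lambda u)^k e^{-\lambda u}}{k!}\cdot\dfrac{(\lambda(t-u))^{n-k}e^{-\lambda(t-u)}}{(n-k)!}}{\dfrac{(\lambda t)^n e^{-\lambda t}}{n!}}
=\binom{n}{k}\Bigl(\frac{u}{t}\Bigr)^{k}\Bigl(1-\frac{u}{t}\Bigr)^{n-k}. \]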

\item Customers arrive at a facility according to a Poisson process of rate $\lambda$. Each customer pays \$1 on arrival, and payments are discounted back to time 0 at the discount rate $\beta>0$.
The expected value of the total discounted sum collected during the interval $(0, t]$ is given by 
%\[ M=E[\sum\limits_{k=1}^{X(t)} e^{-\beta W_k} ]. \]
\[ M=\mathbb{E} \left[ e^{-\beta W_1} + e^{-\beta W_2} +\cdots + e^{-\beta W_{X(t)}} \right]. \]
Evaluate the mean total discounted sum $M$ by conditioning on $X(t) = n$. 
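A brief solution sketch: given $X(t)=n$, the occurrence times are distributed as the order statistics of $n$ independent uniform points on $(0,t]$, so
\[ \mathbb{E}\Bigl[\sum_{k=1}^{n} e^{-\beta W_k}\,\Big|\, X(t)=n\Bigr]
 = \frac{n}{t}\int_0^t e^{-\beta u}\,du = \frac{n(1-e^{-\beta t})}{\beta t},
\qquad M = \mathbb{E}[X(t)]\cdot\frac{1-e^{-\beta t}}{\beta t} = \frac{\lambda}{\beta}\bigl(1-e^{-\beta t}\bigr). \]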

\item Suppose that alpha particles are emitted by a certain radioactive material according to a Poisson process of intensity $\lambda$. Each particle exists for a random duration $Y_k$, independently of everything else, and is then annihilated. Suppose the common distribution function of the lifetimes is $G(y) = \mathbb{P}[Y_k\le y]$. 
Let $M(t)$ count the number of alpha particles existing at time $t$.
Evaluate the probability distribution of $M(t)$ under the condition that $M(0) = 0$.
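A possible solution sketch: a particle that appears at time $u\le t$ is still alive at time $t$ with probability $1-G(t-u)$, independently of the others; marking (thinning) the Poisson arrivals in this way shows that $M(t)$ is Poisson distributed with mean
\[ \lambda\int_0^t \bigl(1-G(t-u)\bigr)\,du = \lambda\int_0^t \bigl(1-G(y)\bigr)\,dy. \]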

\end{enumerate}

%\subsection{课后习题}
\subsection{Homework 05}
E5.3.1, E5.3.3, E5.3.7, P5.3.1, E5.4.1, E5.4.3.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\newpage
\section{Applied Stochastic Processes - Lecture 06}

%\subsection{内容提要}%Markov Chains - Concepts and Examples
\subsection{Summary}
\begin{itemize}
\item  Postulates for the Poisson process
\item  System of differential equations
\item  Pure birth process 
\item  The Yule process 
\end{itemize}

%\subsection{例题讲解}
\subsection{Examples}
\begin{enumerate}
\item A continuous-time, discrete-state Markov process is a family of random variables $\{ X(t); 0 \le t < \infty \}$ whose possible values are the nonnegative integers. 
A Markov process $\{X(t)\}$ has stationary transition probabilities if the transition probability function
\[ p_{ij}(t) = P[ X(t+u)=j \mid X(u)=i ], \,\, i,j = 0,1,2,\cdots, \]
does not depend on $u\ge 0$. 
Explain that a homogeneous Poisson process is a continuous-time Markov process with stationary transition probabilities, and find $p_{ij}(t)$.
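A brief sketch: for a Poisson process of rate $\lambda$, the increment $X(t+u)-X(u)$ is independent of the past and has the Poisson distribution with mean $\lambda t$, so for $j\ge i$
\[ p_{ij}(t) = \frac{(\lambda t)^{j-i}e^{-\lambda t}}{(j-i)!}, \]
independent of $u$, and $p_{ij}(t)=0$ for $j<i$.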

\item Consider a sequence of positive numbers $\{\lambda_k\}$. A pure birth process is a Markov process $\{X(t)\}$ which records the number of births in the time interval $(0, t]$, satisfying $X(0) = 0$ and the following postulates:
\begin{enumerate}
\item[(i)] $P[X(t + h) - X(t) = 1 \mid X(t) = k ] = \lambda_k h + o_{1,k}(h)$.
\item[(ii)] $P[X(t + h) - X(t) = 0 \mid X(t) = k ] = 1 - \lambda_k h + o_{2,k}(h)$. 
\item[(iii)] $P[X(t + h) - X(t) < 0 \mid X(t) = k ] = 0,\,\, (k \ge 0)$.
\end{enumerate}
Verify that the probabilities $p_n(t) = P[X(t)=n]$ satisfy the following system of differential equations.
%It is the probability that there are $n$ births during $(0,t]$.
\begin{eqnarray*}
p_0'(t) &=& - \lambda_0 p_0(t), \,\, p_0(0) =1; \\
p_n'(t) &=& -\lambda _n p_n(t) + \lambda_{n-1}p_{n-1}(t), \,\, p_n(0)=0,\,\, n\ge 1.
\end{eqnarray*}
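A brief derivation sketch: conditioning on the state at time $t$,
\[ p_n(t+h) = p_n(t)\bigl(1-\lambda_n h\bigr) + p_{n-1}(t)\,\lambda_{n-1}h + o(h), \]
so subtracting $p_n(t)$, dividing by $h$, and letting $h\to 0$ gives the stated equations (with the convention $p_{-1}\equiv 0$).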

\item Prove that the above pure birth process has the following properties.
\begin{enumerate}
\item 
%\begin{eqnarray*}
$p_0(t) = e^{-\lambda_0 t}, t> 0; \,\, p_n(t) = \lambda_{n-1}e^{-\lambda_n t} \int_0^t e^{\lambda_n x} p_{n-1}(x)dx,\,\, n=1,2,\cdots.$
%\end{eqnarray*}
\item The time $S_n$ between consecutive births has distribution  $Exp(\lambda_n)$.
\item The expected time before the population becomes infinite is $\sum_n (1/\lambda_n)$.
\item The probability that $X(t)=\infty$ is $1-\sum_{n=0}^{\infty} p_n(t)$.
\end{enumerate}
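A hint for (a): multiply the equation $p_n'+\lambda_n p_n=\lambda_{n-1}p_{n-1}$ by the integrating factor $e^{\lambda_n t}$, so that
\[ \frac{d}{dt}\bigl(e^{\lambda_n t}p_n(t)\bigr)=\lambda_{n-1}e^{\lambda_n t}p_{n-1}(t), \]
and integrate from $0$ to $t$ using $p_n(0)=0$.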

\item Under certain conditions, the infinitesimal probability of a birth at a given instant is proportional to the population size at that time. This model is known as the Yule process: a pure birth process with infinitesimal parameters $\lambda_n=n\beta$, started from $X(0)=1$.
\begin{enumerate}
\item Verify that the solutions are $p_n(t) = e^{-\beta t}(1-e^{-\beta t} )^{n-1},\,\, n\ge 1$.
\item Compare the Yule process with the deterministic population growth model represented by the differential equation $dy/dt = \alpha y$. 
\end{enumerate}
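A partial check for (a), assuming $X(0)=1$: for $n=1$ the proposed solution $p_1(t)=e^{-\beta t}$ satisfies $p_1'=-\beta p_1$ with $p_1(0)=1$, and the general case follows by induction using the analogue of the integral formula in the previous example. For (b), the mean population size is
\[ \mathbb{E}[X(t)] = \sum_{n\ge 1} n\,e^{-\beta t}\bigl(1-e^{-\beta t}\bigr)^{n-1} = e^{\beta t}, \]
matching the exponential growth of the deterministic model.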

\end{enumerate}

%\subsection{课后习题}
\subsection{Homework 06}
E6.1.1, E6.1.2, P6.1.1, P6.1.2.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\newpage
\section{Applied Stochastic Processes - Lecture 07}

%\subsection{内容提要}%Markov Chains - Concepts and Examples
\subsection{Summary}
\begin{itemize}
\item  Renewal process
\item  Replacements of lightbulbs
\item  Renewal function
\item  Wald identity
\item  Block replacement
\end{itemize}

%\subsection{例题讲解}
\subsection{Examples}
\begin{enumerate}
\item A renewal process $\{N(t); t \ge 0\}$ is a nonnegative integer-valued stochastic process that registers the successive occurrences of an event during the time interval $(0, t]$, where the times between consecutive events are positive, independent and identically distributed random variables $\{X_k\}$.
%often representing the lifetimes of some units successively placed into service.
Explain that homogeneous Poisson processes are renewal processes.
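A brief sketch: for a homogeneous Poisson process of rate $\lambda$, the times between consecutive events are independent $Exp(\lambda)$ random variables (Lecture 05), which are positive and identically distributed; hence the counting process is a renewal process, with renewal function $M(t)=\lambda t$.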

\item Explain the following link between the waiting time process $\{W_n=X_1+\cdots+X_n\}$ and the renewal counting process $\{N(t)\}$. 
\begin{center}
$N(t)\ge k$ if and only if $W _k\le t$.
\end{center}

\item Let $\{N(t);t\ge 0\}$ be a renewal process with inter-occurrence distribution $F$. 
Let $F_k$ be the $k$-fold convolution of $F$ with itself. 
The renewal function $M(t) = \mathbb{E}[N(t)]$ is the expected number of renewals for the time duration $(0, t]$.  
Prove the following formula. 
\[ M(t) = \sum_{k=1}^{\infty} F_k(t). \]
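A brief proof sketch, using the link of the previous example and the fact that $N(t)$ is nonnegative integer-valued:
\[ M(t)=\mathbb{E}[N(t)]=\sum_{k=1}^{\infty}\mathbb{P}[N(t)\ge k]=\sum_{k=1}^{\infty}\mathbb{P}[W_k\le t]=\sum_{k=1}^{\infty}F_k(t). \]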

\item Let $\{N(t); t \ge 0\}$ be a renewal process with inter-occurrence times $\{X_k\}$. 
Let $W_k$ be the waiting time until the occurrence of the $k$th event. 
Prove the following formula (the Wald identity). 
\[ \mathbb{E}[W_{N(t)+1} ] = \mathbb{E}[X_1+\cdots+X_{N(t)+1}] = \mathbb{E}[X_1]\,\mathbb{E}[N(t)+1]. \]
%Explain that in general $E[W_{N(t)} ] \neq E[X_1]E[N(t)]$.
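A brief proof sketch: write $W_{N(t)+1}=\sum_{k\ge 1}X_k\,\mathbf{1}\{N(t)+1\ge k\}$ and note that the event $\{N(t)+1\ge k\}=\{W_{k-1}\le t\}$ is determined by $X_1,\cdots,X_{k-1}$ alone, hence is independent of $X_k$; therefore
\[ \mathbb{E}[W_{N(t)+1}]=\sum_{k\ge 1}\mathbb{E}[X_k]\,\mathbb{P}[N(t)+1\ge k]=\mathbb{E}[X_1]\,\mathbb{E}[N(t)+1]. \]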

\item Consider a light bulb whose life, measured in discrete units, is a random variable $X$ with distribution 
$\mathbb{P}[X = k] = p_k$ for $k = 1, 2, \cdots$. 
%Assuming that one starts with a fresh bulb and that each bulb is replaced by a new one when it burns out.
%The expected number of replacements up to time $n$ is: \[ M(n) = \mathbb{E}[N(n)]. \]
A block replacement policy attempts to reduce cost by fixing a block period $K$, replacing bulbs as they fail during periods $1, 2, \cdots , K - 1$, and replacing all bulbs, failed or not, in period $K$. 

Suppose $c_1$ is the per-bulb block replacement cost and $c_2$ is the per-bulb failure replacement cost, with $c_1 < c_2$. 
%, then the mean total cost during the block replacement cycle is $c_1 + c_2\mathbb{E}[N(K-1)]$.
What is the optimal block period $K$?
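A possible formulation, writing $\theta(K)$ for the long-run mean cost per bulb per period: over one block cycle the mean cost per bulb is $c_1+c_2 M(K-1)$, where $M(n)=\mathbb{E}[N(n)]$ is the renewal function of the failure process, so
\[ \theta(K)=\frac{c_1+c_2\,M(K-1)}{K}, \]
and the optimal block period is the value of $K$ minimizing $\theta(K)$.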

\end{enumerate}

%\subsection{课后习题}
\subsection{Homework 07}
E7.1.2, E7.1.3, E7.2.1, E7.2.3, P7.2.1.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\newpage
\section{Applied Stochastic Processes - Lecture 08}

%\subsection{内容提要}%Markov Chains - Concepts and Examples
\subsection{Summary}
\begin{itemize}
\item  A little history
\item  The Brownian motion stochastic process
\item  The diffusion equation
\item  The definition of Brownian motion
\item  The maximum variable and the reflection principle
\end{itemize}

%\subsection{例题讲解}
\subsection{Examples}
\begin{enumerate}
\item Give a brief history of Brownian motion: Robert Brown, Louis Bachelier, Albert Einstein, and Norbert Wiener.

\item A Brownian motion with diffusion coefficient $\sigma^2$ is a stochastic process $\{B(t); t\ge 0\}$ satisfying the following properties.
Draw a few sample paths of this process. 
\begin{enumerate}
\item[(i)] Every increment $B(s + t) - B(s)$ is normally distributed $N(0,\sigma^2t)$. %with mean zero and variance $\sigma^2t$.
\item[(ii)] The increments over disjoint time intervals are independent. 
%For every pair of disjoint time intervals $(t_1, t_2]$, $(t_3,t_4]$, the increments $B(t_4)-B(t_3)$ and $B(t_2)-B(t_1)$ are independent random variables, and similarly for $n$ disjoint time intervals.
\item[(iii)] $B(0) = 0$, and $B(t)$ is continuous as a function of $t$.
\end{enumerate}

\item Let $\{B(t); t\ge 0\}$ be a Brownian motion. 
Let $p(y, t\mid x)$ denote the transition probability density: the probability density of $B(t_0+t)$ at the value $y$, given that $B(t_0)=x$. 
\begin{enumerate}
\item As a conditional probability density, $p$ has the following two properties. Explain their similarity to the row properties of the transition probability matrix of a Markov chain. 
\[ p(y,t\mid x) \ge 0,\hspace{0.5cm} \int_{-\infty}^{\infty} p(y,t\mid x) dy =1. \]
%\item During an infinitesimal time interval, we have $\lim\limits_{t\to 0} p(y,t \mid x)=0$ for $y\neq x$. 
\item Verify that the diffusion equation $\frac{\partial p}{\partial t} = \frac{1}{2}\sigma^2 \frac{\partial^2 p}{\partial x^2}$ has the following solution. 
\[ p(y,t\mid x) = \frac{1}{\sqrt{2\pi \sigma^2t}} \exp\left[ -\frac{(y-x)^2}{2\sigma^2t}\right]. \]
%What is the coefficient $\sigma^2$ ?
\end{enumerate}
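A brief verification sketch for (b): with $p$ as above,
\[ \frac{\partial p}{\partial t}=p\left[\frac{(y-x)^2}{2\sigma^2 t^2}-\frac{1}{2t}\right],\qquad
\frac{\partial^2 p}{\partial x^2}=p\left[\frac{(y-x)^2}{\sigma^4 t^2}-\frac{1}{\sigma^2 t}\right], \]
so $\frac{1}{2}\sigma^2\,\partial^2 p/\partial x^2=\partial p/\partial t$, as required.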

\item Suppose that the summands $\xi_k$ are independent with $P[\xi_k=1]=P[\xi_k=-1]=1/2$. 
Then the partial sum process $S_n=\xi_1+\cdots+\xi_n$ is a simple symmetric random walk. Let $a$ and $b$ be two positive integers.  

Explain why the probability that $S_n$ reaches $-a$ before $b$ equals the probability that a standard Brownian motion $B(t)$ reaches $-a$ before $b$, and compute this probability. %And it is $b/(a+b)$.
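A brief sketch: let $T$ be the first time the walk (respectively the Brownian motion) reaches $-a$ or $b$, and let $u$ be the probability that $-a$ is reached first. Both processes are mean-zero martingales, and optional stopping gives $\mathbb{E}[S_T]=0$ (respectively $\mathbb{E}[B(T)]=0$), so
\[ -a\,u + b\,(1-u) = 0 \;\Rightarrow\; u=\frac{b}{a+b}. \]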

\item Let $\{B(t); t\ge 0\}$ be a standard Brownian motion. Define the maximum process $\{M(t)\}$ and the hitting time $\tau_x$ in the following way. Compute their distributions. 
\[ M(t)=\max\{B(u)\mid 0\le u\le t\},\hspace{0.3cm} \tau_x=\min\{u\mid B(u)=x\}. \]
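A brief sketch via the reflection principle, for $x>0$:
\[ P[\tau_x\le t]=P[M(t)\ge x]=2\,P[B(t)\ge x]=\frac{2}{\sqrt{2\pi t}}\int_x^{\infty}e^{-u^2/(2t)}\,du. \]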


\end{enumerate}

%\subsection{课后习题}
\subsection{Homework 08}
E8.1.1, E8.1.2, E8.1.4, P8.1.1, P8.1.3, E8.2.1.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\end{document}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%




