\documentclass[a4paper]{ctexart}
% useful packages.
\usepackage{amsfonts}
\usepackage{amsmath}
\usepackage{amssymb}
\usepackage{amsthm}
\usepackage{enumerate}
\usepackage{graphicx}
\usepackage{multicol}
\usepackage{fancyhdr}
\usepackage{layout}
\newtheorem{Definition}{\hspace{2em}定义}
\newtheorem{Example}{\hspace{2em}例}
\newtheorem{Thm}{\hspace{2em}定理}
\newtheorem{Lem}{\hspace{2em}引理}
\newtheorem{cor}{\hspace{2em}推论}
% some common command
\newcommand{\dif}{\mathrm{d}}
\newcommand{\avg}[1]{\left\langle #1 \right\rangle}
\newcommand{\difFrac}[2]{\frac{\dif #1}{\dif #2}}
\newcommand{\pdfFrac}[2]{\frac{\partial #1}{\partial #2}}
\newcommand{\OFL}{\mathrm{OFL}}
\newcommand{\UFL}{\mathrm{UFL}}
\newcommand{\fl}{\mathrm{fl}}
\newcommand{\op}{\odot}
\newcommand{\cp}{\cdot}
\newcommand{\Eabs}{E_{\mathrm{abs}}}
\newcommand{\Erel}{E_{\mathrm{rel}}}
\newcommand{\DR}{\mathcal{D}_{\widetilde{LN}}^{n}}
\newcommand\tbbint{{-\mkern -16mu\int}}
\newcommand\tbint{{\mathchar '26\mkern -14mu\int}}
\newcommand\dbbint{{-\mkern -19mu\int}}
\newcommand\dbint{{\mathchar '26\mkern -18mu\int}}
\newcommand\bint{%
{\mathchoice{\dbint}{\tbint}{\tbint}{\tbint}}%
}
\newcommand\bbint{%
{\mathchoice{\dbbint}{\tbbint}{\tbbint}{\tbbint}}%
}
\title{Homework\# 1}
\author{Shuang Hu}
\begin{document}
\maketitle
\section*{P13 Problem3}
\begin{proof}
Proof by induction. First, consider the case $k=1$. We can see:
\[
LHS=x_{1}+x_{2}+\cdots+x_{n}.
\]
Set $\beta_{i}=(0,\cdots,0,1,0,\cdots,0)$, where the $1$ is in the $i$-th position. We can see:
\[
RHS=\sum_{i=1}^{n}x^{\beta_{i}}=\sum_{i=1}^{n}x_{i}=LHS.
\]
So the equation holds when $k=1$. Assume the result holds for $k=k_{0}$, i.e.
\begin{equation}
    (x_{1}+\cdots+x_{n})^{k_{0}}=\sum_{|\alpha|=k_{0}}\binom{|\alpha|}{\alpha}x^{\alpha}.
\end{equation}
Consider the case $k_{0}+1$:
\begin{equation}
    \begin{aligned}
        (x_{1}+\cdots+x_{n})^{k_{0}+1}&=(x_{1}+\cdots+x_{n})\sum_{|\alpha|=k_{0}}\binom{|\alpha|}{\alpha}x^{\alpha}\\
        &=\sum_{|\alpha|=k_{0}}\sum_{j=1}^{n}\binom{|\alpha|}{\alpha}x_{j}x^{\alpha}\\
        &=\sum_{|\beta|=k_{0}+1}\sum_{j=1}^{n}\binom{|\beta_{j}|}{\beta_{j}}x^{\beta}.\\
    \end{aligned}
\end{equation}
Here, for $\beta=(\beta_{1},\cdots,\beta_{n})$, we write $\beta_{j}:=(\beta_{1},\cdots,\beta_{j}-1,\cdots,\beta_{n})$, and only the indices $j$ with $\beta_{j}\ge 1$ contribute to the inner sum.
It suffices to show that
\begin{equation}
    \binom{|\beta|}{\beta}=\sum_{j=1}^{n}\binom{|\beta_{j}|}{\beta_{j}}.
\end{equation}
We can see:
\begin{equation}
    \begin{aligned}
        RHS&=k_{0}!\sum_{j=1}^{n}\frac{1}{\beta_{1}!\cdots(\beta_{j}-1)!\cdots\beta_{n}!}\\
        &=k_{0}!\frac{\beta_{1}+\cdots+\beta_{n}}{\beta_{1}!\cdots\beta_{n}!}\\
        &=LHS.\\
    \end{aligned}
\end{equation}
So $(3)$ is true. By induction, the multinomial theorem is true.
\end{proof}
\section*{P13 Problem4}
\begin{proof}
If $|\alpha|=1$, by the formula of partial derivative, we can see:
\[
\pdfFrac{(uv)}{x_{i}}=v\pdfFrac{u}{x_{i}}+u\pdfFrac{v}{x_{i}}.
\]
So $D^{\alpha}(uv)=uD^{\alpha}v+vD^{\alpha}u$ when $|\alpha|=1$.

Assume the result is true for $|\alpha|=k_{0}$, and consider $|\beta|=k_{0}+1$. Write 
$\beta=(k_{1},\cdots,k_{n})$; WLOG assume $k_{1}\ge 1$, and set $\alpha=(k_{1}-1,k_{2},\cdots,k_{n})$. Then:
\begin{equation}
    \begin{aligned}
        D^{\beta}(uv)&=D^{\alpha}(vu_{1}+uv_{1})\\
        &=D^{\alpha}(u_{1}v)+D^{\alpha}(v_{1}u)\\
        &=\sum_{\gamma\le\alpha}\binom{\alpha}{\gamma}D^{\gamma}u_{1}D^{\alpha-\gamma}v
        +\sum_{\gamma\le\alpha}\binom{\alpha}{\gamma}D^{\gamma}uD^{\alpha-\gamma}v_{1}\\
        &=\sum_{\gamma\le\alpha}\left[\binom{\alpha}{\gamma}D^{\gamma'}uD^{\alpha'-\gamma'}v
        +\binom{\alpha}{\gamma}D^{\gamma}uD^{\alpha'-\gamma}v\right],
    \end{aligned}
\end{equation}
where $u_{1}=\pdfFrac{u}{x_{1}}$, $v_{1}=\pdfFrac{v}{x_{1}}$, $\gamma'=(\gamma_{1}+1,\gamma_{2},\cdots,\gamma_{n})$, and $\alpha'=(\alpha_{1}+1,\alpha_{2},\cdots,\alpha_{n})$.

Finally, the coefficient in front of $D^{\gamma'}uD^{\alpha'-\gamma'}v$ is:
\[
\binom{\alpha}{\gamma}+\binom{\alpha}{\gamma'}=\binom{\alpha'}{\gamma'}.
\]
By induction, the result is true.
\end{proof}
\section*{P13 Problem5}
\begin{proof}
    Set $g(t)=f(tx)$. First, we should prove a lemma:
    \begin{equation}
    g^{(k)}(t)=\sum_{|\alpha|=k}\frac{k!}{\alpha!}x^{\alpha}D^{\alpha}f.
    \end{equation}
    For $k=1$, we can see
    $g'(t)=\difFrac{f(tx_{1},\cdots,tx_{n})}{t}=\sum_{i=1}^{n}x_{i}f_{x_{i}}$, which satisfies $(6)$.

    Assume $(6)$ is true for $k=k_{0}$, and consider $k=k_{0}+1$. We can see:
    \begin{equation}
        \begin{aligned}
            g^{(k_{0}+1)}(t)&=\sum_{|\alpha|=k_{0}}\frac{k_{0}!}{\alpha!}x^{\alpha}\difFrac{(D^{\alpha}f)(tx)}{t}\\
            &=\sum_{|\alpha|=k_{0}}\frac{k_{0}!}{\alpha!}x^{\alpha}\sum_{i=1}^{n}x_{i}\frac{\partial^{k_{0}+1}f}
            {\partial x_{1}^{\alpha_{1}}\cdots\partial x_{i}^{\alpha_{i}+1}\cdots\partial x_{n}^{\alpha_{n}}}\\
            &=\sum_{|\beta|=k_{0}+1}\frac{(k_{0}+1)!}{\beta!}x^{\beta}D^{\beta}f.
        \end{aligned}
    \end{equation}
    In this equation, $\beta_{i}:=(\alpha_{1},\cdots,\alpha_{i}+1,\cdots,\alpha_{n})$, and $x^{\alpha}x_{i}=x^{\beta_{i}}$.

    So, by induction, we can see $g^{(k)}(t)=\sum_{|\alpha|=k}\frac{k!}{\alpha!}x^{\alpha}D^{\alpha}f.$

    By Taylor's theorem with Lagrange remainder:
    \[
    g(t)=\sum_{i=0}^{k}\frac{g^{(i)}(0)}{i!}t^{i}+\frac{g^{(k+1)}(\xi)}{(k+1)!}t^{k+1}.
    \]

    So $f(x)=g(1)=\sum_{|\alpha|\le k}\frac{1}{\alpha!}D^{\alpha}f(0)x^{\alpha}+O(|x|^{k+1})$.
\end{proof}
\section*{P85 Problem4}
\begin{proof}
    Define $u_{\epsilon}=u+\epsilon|x|^{2}$, we can see:
    \begin{equation}
        \Delta u_{\epsilon}=\Delta u+2n\epsilon=2n\epsilon>0\text{ on }U.
    \end{equation}
    Assume $u_{\epsilon}$ attains its maximum at an interior point $x_{0}\in U$, i.e. $u_{\epsilon}(x_{0})=\max_{x\in\bar{U}}u_{\epsilon}(x)$. Since 
    $x_{0}$ is an interior maximum point, $\nabla^{2}u_{\epsilon}(x_{0})$ is negative semi-definite, 
    which means $\Delta u_{\epsilon}(x_{0})\le 0$, contradicting $\Delta u_{\epsilon}=2n\epsilon>0$!

    So the maximum of $u_{\epsilon}$ is attained on $\partial U$, and:
    \begin{equation}
        u(x)\le u_{\epsilon}(x)\le\max_{\partial U}u_{\epsilon}\le\max_{x\in\partial U}u+\epsilon C,
    \end{equation}
    where $C$ is a constant. Letting $\epsilon\rightarrow 0^{+}$, we can see $u(x)\le\max_{\partial U}u$ for all $x\in U$.
\end{proof}
\section*{P85 Problem5}
(a).Set
\begin{equation}
    h(r)=\bbint_{\partial B(x,r)}v(y)\dif S(y).
\end{equation}
Then:
\begin{equation}
    \begin{aligned}
        h'(r)&=\bbint_{\partial B(0,1)}Dv(x+rz)\cdot z\dif S(z)\\
        &=\bbint_{\partial B(x,r)}Dv(y)\cdot\frac{y-x}{r}\dif S(y)\\
        &=\bbint_{\partial B(x,r)}Dv\cdot n\dif S(y)\\
        &=\frac{r}{n}\bbint_{B(x,r)}\Delta v\dif y\ge0.\\
    \end{aligned}
\end{equation}
What's more:
\begin{equation}
    \begin{aligned}
        \int_{B(x,r)}v\dif y&=\int_{0}^{r}\int_{\partial B(x,\tau)}v\dif S(y)\dif\tau\\
        &=\int_{0}^{r}n\alpha(n)t^{n-1}h(t)\dif t\\
        &\ge v(x)\int_{0}^{r}n\alpha(n)t^{n-1}\dif t\\
        &=v(x)\alpha(n)r^{n}.
    \end{aligned}
\end{equation}
So $v(x)\le\bbint_{B(x,r)}v\dif y$.

(b). Assume $v$ attains its maximum at an interior point of $U$. Consider the set:
\begin{equation}
    E:=\{x\in U:v(x)=\max_{\bar{U}}v\}.
\end{equation}
It's clear that $E$ is closed relative to $U$. On the other hand, by $(a)$, 
if $x_{0}\in E$, then $\exists\delta>0$ s.t. $B(x_{0},\delta)\subset E$. So $E$ is an open set.

So $E$ is clopen in $U$. Since $U$ is an open connected region, $E=U$; in this case $v$ is a constant function, 
which proves the result.

(c).
\begin{equation}
    \frac{\partial^{2}v}{\partial x_{i}^{2}}=\phi''(u)\left(\pdfFrac{u}{x_{i}}\right)^{2}+\phi'(u)\pdfFrac{^{2}u}{x_{i}^{2}}.
\end{equation}
As $\phi$ is convex, we can see $\phi''(x)\ge 0$. So:
\begin{equation}
    \Delta v\ge\phi'(u)\Delta u=0.
\end{equation}
Which means that $v$ is subharmonic.

(d).
\begin{equation}
    \pdfFrac{^{2}v}{x_{i}^{2}}=2\sum_{j=1}^{n}(u_{x_{i}x_{j}})^{2}+2\sum_{j=1}^{n}u_{x_{j}}u_{x_{j}x_{i}x_{i}}.
\end{equation}
As $\Delta u=0\Rightarrow\forall j,\ \Delta u_{x_{j}}=0$. So:
\begin{equation}
    \Delta v=2\sum_{i,j=1}^{n}(u_{x_{i}x_{j}})^{2}\ge 0\Rightarrow -\Delta v\le0.
\end{equation}
So $v$ is subharmonic.
\section*{P86 Problem6}
\begin{proof}
Set $F:=\max_{\bar{U}}|f|$, $G:=\max_{\partial U}|g|$, $v:=u+\frac{|x|^{2}}{2n}F$, $w:=-u+\frac{|x|^{2}}{2n}F$. 
We can see $v$ and $w$ are both subharmonic, so each attains its maximum on $\partial U$, which means:
\[
u\le G+C_{1}F,\qquad -u\le G+C_{2}F.
\]
So $|u|\le C(G+F)$.
\end{proof}
\end{document}