%!TEX program = xelatex
\documentclass[t,12pt,aspectratio=169]{beamer} % 16:9 widescreen ratio, suited to modern projectors
%\usepackage{ctex} % Chinese language support
\usepackage{amsmath, amsthm, amssymb, bm} % mathematical formulas and symbols
\usepackage{graphicx}
\usepackage{hyperref}
\usepackage{color}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Paragraph spacing
\usepackage{setspace}
\onehalfspacing
\setlength{\parskip}{1em}  % set the spacing between paragraphs to 1em

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Add extra space below the frame title on every page
\addtobeamertemplate{frametitle}{}{\vspace*{0.7em}}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\usetheme{Madrid} % theme (a clean style is recommended)
\usecolortheme{default} % alternatives: seahorse, beaver, dolphin, etc.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Title information
\title{Chapter 04: Jacobian Conjecture}
\author{SCC ET AL}
%\institute[XX University]{XX University \quad School of Mathematics and Statistics \quad Mathematics and Applied Mathematics}
%\date{June 2025}

\begin{document}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Title page
\begin{frame}
  \titlepage
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Table of contents
\begin{frame}{Contents}
  \tableofcontents
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Section 0
%\section{INTRO.}
\begin{frame}[allowframebreaks]{Introduction}
    
The Jacobian conjecture was proposed by O.H. Keller in 1939. 

It asks whether a polynomial endomorphism of $\mathbb{C}^n$ whose Jacobian determinant is a nonzero constant must be invertible. 

Despite its deceptively simple statement, the conjecture has not been proved even in the two-dimensional case. 

In this chapter we show that this conjecture would follow if one could prove that every endomorphism of the Weyl algebra is an automorphism. 

The chapter opens with a discussion of polynomial maps, which will play a central role in the second part of the book. 

We shall return to the Jacobian conjecture in Chapter 19.

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Section 1
\section{Polynomial Maps}
\begin{frame}[allowframebreaks]{Polynomial Maps}

Let $F: K^n \to K^m$ be a map. 

We say that $F$ is {\color{red}polynomial} if there exist $F_1, \ldots, F_m \in K[x_1, \ldots, x_n]$ such that $F(p) = (F_1(p), \ldots, F_m(p))$ for every point $p \in K^n$. 

A polynomial map is called an {\color{red}isomorphism} or a {\color{red}polynomial isomorphism} if it has an inverse which is also a polynomial map. 

It is not always the case that a bijective polynomial map has an inverse which is also polynomial; for an example see Exercise 5.1. 

However, if $K=\mathbb{C}$, every bijective polynomial map has a polynomial inverse. 

This is proved in [Bass, Connell and Wright 82; Theorem 2.1].

For the rest of the section we shall write $X,Y$ for the spaces $K^n$ and $K^m$, and $K[X], K[Y]$ for the polynomial rings $K[x_1, \ldots, x_n]$ and $K[y_1, \ldots, y_m]$.

A polynomial $g \in K[Y]$ may be identified with the function of $Y$ into $K$ which maps $p \in Y$ to $g(p)$. 

Clearly if $g=0$ as a polynomial, then it induces the zero function on $Y$. 

Since $K$ is a field of characteristic zero, and therefore infinite, the converse is also true: a polynomial which induces the zero function on $Y$ is identically zero. 

For a proof see Exercise 5.2. 

This identification is the key to the construction that follows.

Suppose that $F: X \to Y$ is a polynomial map. 

We may define a map,
\begin{equation}
F{\,}^\sharp : K[Y] \to K[X],
\end{equation}
by the formula $F{\,}^\sharp(g) = g \cdot F$ (the composite of $F$ followed by $g$), where $g \in K[Y]$. 

Note that the arrow gets reversed as we go from $F$ to $F{\,}^\sharp$. 

The map $F{\,}^\sharp$ is called the {\color{red}comorphism} of $F$. 

A routine calculation shows that $F{\,}^\sharp$ is a homomorphism of polynomial rings.

Let us calculate an example. 

Suppose that $n < m$ and that $F: X \to Y$ is the map
\begin{equation}
F(x_1, \ldots, x_n) = (x_1, \ldots, x_n, 0, \ldots, 0).
\end{equation}

Then the algebra homomorphism $F{\,}^\sharp: K[Y] \to K[X]$ maps a polynomial $g(y_1, \ldots, y_m)$ to $g(x_1, \ldots, x_n, 0, \ldots, 0)$.
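
For instance (a concrete case of this example, with $n = 1$, $m = 2$ and a hand-picked $g$): if $g(y_1, y_2) = y_1^2 + y_1 y_2 + y_2^3$, then
\begin{equation}
F{\,}^\sharp(g) = g(x_1, 0) = x_1^2.
\end{equation}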

We may turn the construction of the above paragraph inside out. 

Suppose that a ring homomorphism $\phi: K[Y] \to K[X]$ is given. 

Then we may use it to construct a polynomial map from $X$ to $Y$. 

Let $y_i$ be an indeterminate in $K[Y]$. 

Then $\phi(y_i)$ is a polynomial in $K[X]$. 

Now let $\phi_\sharp: X \to Y$ be the map whose coordinate functions are $\phi(y_1), \ldots, \phi(y_m)$. 

These two constructions are each other's inverse, as the next results show.


\textbf{1.1 Theorem.} Let $F: X \to Y$ be a polynomial map. Then $(F{\,}^\sharp)_\sharp = F$. 

Furthermore, if $Z = K^r$ and $G: Y \to Z$ is another polynomial map, then $G \cdot F: X \to Z$ is a polynomial map and $(G \cdot F)^\sharp = F{\,}^\sharp \cdot G^\sharp$.

\textbf{Proof:} Let $y_i$ be an indeterminate in $K[Y]$. 

As a function $Y \to K$ we have that $y_i$ maps a point of $Y$ onto its $i$-th coordinate. 

Hence,
\begin{equation}
F{\,}^\sharp(y_i) = y_i \cdot F = F_i.
\end{equation}

Thus, the coordinate functions of $(F{\,}^\sharp)_\sharp$ are $F_1, \ldots, F_m$, which are the coordinate functions of $F$. 

Hence $(F{\,}^\sharp)_\sharp = F$.

It is clear that $G \cdot F$ is a polynomial map. 

Let $g \in K[Z] = K[z_1, \ldots, z_r]$. 

Then
\begin{equation}
(G \cdot F)^\sharp(g) = g \cdot (G \cdot F).
\end{equation}

Since the composition of maps is associative,
\begin{equation}
(G \cdot F)^\sharp(g) = (g \cdot G) \cdot F = F{\,}^\sharp(G^\sharp(g)),
\end{equation}
as required.

The converse of Theorem 1.1 is also true. 

We state it and leave the proof to the reader.


\textbf{1.2 Theorem.} If $\phi: K[Y] \to K[X]$ is a homomorphism of polynomial rings, then $(\phi_\sharp)^\sharp = \phi$. 

Furthermore, if $\psi: K[Z] \to K[Y]$ is another homomorphism, then
\begin{equation}
(\phi \cdot \psi)_\sharp = \psi_\sharp \cdot \phi_\sharp.
\end{equation}

The following result is an immediate consequence of Theorems 1.1 and 1.2.


\textbf{1.3 Corollary.} A polynomial map $F: X \to Y$ is an isomorphism if and only if $F{\,}^\sharp$ is an isomorphism.

The Jacobian conjecture, discussed in the next section, proposes a simple criterion to determine whether a polynomial map is an isomorphism.

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Section 2
\section{Jacobian Conjecture}
\begin{frame}[allowframebreaks]{The Jacobian Conjecture}

Let $F: K^n \to K^n$ be a polynomial map. 

Denote by $J(F)$ its Jacobian matrix. 

This is the matrix whose $ij$ entry is $\partial F_i / \partial x_j$. 

If $F$ is an isomorphism with polynomial inverse $G$, then the chain rule gives $J(G)(F(p)) \cdot J(F)(p) = I$ at every point $p$ of $K^n$; in particular, $J(F)$ is an invertible matrix everywhere. 

Taking determinants, $(\Delta G \cdot F)\,\Delta F = 1$, so the determinant $\Delta F = \det J(F)$ is an invertible polynomial, hence a nonzero constant: degrees add under multiplication, so the units of $K[x_1, \ldots, x_n]$ are the nonzero constants.

Now suppose that $K=\mathbb{R}$ or $\mathbb{C}$. 

If $p \in K^n$ and $\Delta F(p) \neq 0$, then by the inverse function theorem there exists a neighbourhood $U$ of $p$ such that $F$ restricted to $U$ is invertible. 

This leads to the following question. 

If $\Delta F$ is non-zero everywhere on $K^n$, is there a function $G: K^n \to K^n$ which is the inverse of $F$?

Two comments are in order. 

First, although $F$ has a local inverse in a neighbourhood of every point (by the inverse function theorem), it is not clear whether these local inverses can be `glued' together to produce an inverse on the whole of $K^n$. 

Secondly, even if the inverse exists, it may not be a polynomial map, as shown in Exercise 5.1. 

The Jacobian conjecture, first stated in [Keller 39], is a refinement of the above question.


\textbf{2.1 Jacobian Conjecture.} Let $F: K^n \to K^n$ be a polynomial map. 

If $\Delta F = 1$ on $K^n$, then $F$ has a polynomial inverse on the whole of $K^n$.

Let us see what happens if $n=1$. 

In this case, we have a map $F: K \to K$ which is determined by a polynomial $F(x)$ in one variable. 

The Jacobian matrix is the $1 \times 1$ matrix whose entry is the derivative $dF/dx$, and we are assuming that $dF/dx = 1$.

Hence $F(x) = x + c$ for some $c \in K$, and consequently it has the polynomial inverse $x \mapsto x - c$. 

This proves that the Jacobian conjecture holds for $n=1$. 

The fact that an invertible polynomial map must have degree one is special to the one-dimensional case. 

For $n \geq 2$, an invertible polynomial map may have coordinate functions of arbitrarily high degree, as shown by the examples of Exercise 5.3.
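
A simple instance (a standard triangular example, chosen for illustration and not necessarily the one of Exercise 5.3): for any $d \geq 1$, the map
\begin{equation}
F(x,y) = (x + y^d,\, y), \qquad J(F) = \begin{pmatrix} 1 & d\,y^{d-1} \\ 0 & 1 \end{pmatrix}, \qquad \Delta F = 1,
\end{equation}
has the polynomial inverse $G(x,y) = (x - y^d,\, y)$, although its first coordinate function has degree $d$.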

Despite many attempts to settle it, the Jacobian conjecture remains open for every $n \geq 2$. 

A number of results related to the Jacobian conjecture have accumulated over the years; for a very nice survey of some of these results see [Bass, Connell and Wright 82]. 

For example, it is known that the conjecture is false over fields of positive characteristic; see Exercise 5.4. 

On the positive side, if all the coordinate functions of $F$ have degree $\leq 2$, then the conjecture is true; see Exercise 5.5.

We now rephrase the Jacobian conjecture using comorphisms. 

Let $X = K^n$ and $K[X] = K[x_1, \ldots, x_n]$. 

The Jacobian Conjecture states that if $\Delta F = 1$ on $X$ then $F{\,}^\sharp$ is an invertible homomorphism of rings. 

Actually it is not hard to see that if $\Delta F$ is a nonzero constant, then $F{\,}^\sharp$ is necessarily injective. 

We prove this in a little more generality.


\textbf{2.2 Lemma.} Let $F: X \to X$ be a polynomial map and suppose that $\Delta F \neq 0$ everywhere in $X$. 

Then $F{\,}^\sharp$ is injective.

\textbf{Proof:} Suppose that $F{\,}^\sharp$ is not injective, and choose a non-constant polynomial $g \in K[X]$ of smallest degree such that $F{\,}^\sharp(g) = 0$; such a $g$ exists because $F{\,}^\sharp$ fixes the constants, so no non-zero constant lies in its kernel. 

Then $g(F) = 0$. 

Let $g_i = \partial g / \partial x_i$ and
\begin{equation}
\mathbf{v} = (g_1(F_1, \ldots, F_n), \ldots, g_n(F_1, \ldots, F_n)).
\end{equation}

Differentiating the relation $g(F) = 0$ with respect to each variable, the chain rule gives
\begin{equation}
\mathbf{v}(p) \cdot J(F)(p) = 0
\end{equation}
for every $p \in X$. 

Since
\begin{equation}
\Delta F(p) = \det J(F)(p) \neq 0,
\end{equation}

we conclude that $\mathbf{v}(p) = 0$ for every $p \in X$. 

Thus $g_i(F_1, \ldots, F_n) = F{\,}^\sharp(g_i) = 0$ for $1 \leq i \leq n$. 

Since $g$ is not constant, at least one of the $g_i$ must be non-zero. 

But that $g_i$ has degree smaller than $g$, contradicting the choice of $g$.

Denote by $K[F_1, \ldots, F_n]$ the subalgebra of $K[X]$ generated by the coordinate functions of $F$. 

This is the image of the homomorphism $F{\,}^\sharp$. 

If $\Delta F = 1$ then $F{\,}^\sharp$ is injective by Lemma 2.2. 

Now assume that $K[F_1, \ldots, F_n] = K[X]$; then $F{\,}^\sharp$ is also surjective. 

Hence by Corollary 1.3, $F$ itself is an isomorphism. 

Thus the Jacobian conjecture may be rephrased as follows.


\textbf{2.3 Jacobian Conjecture.} Let $F: K^n \to K^n$ be a polynomial map and assume that $\Delta F = 1$ in $K^n$. 

Then $K[F_1, \ldots, F_n] = K[x_1, \ldots, x_n]$.
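
To see the rephrased conjecture in action in a trivially verifiable case (a hand-picked example): for $F(x,y) = (x + y^2,\, y)$ we have $\Delta F = 1$ and
\begin{equation}
K[F_1, F_2] = K[x + y^2,\, y] = K[x, y],
\end{equation}
since $x = F_1 - F_2^{\,2}$; accordingly, $F$ has the polynomial inverse $(x - y^2,\, y)$.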

In §4 we show that if every endomorphism of the Weyl algebra is an automorphism then the Jacobian conjecture holds. 

This follows an idea of L. Vaserstein and V. Katz; see [Bass, Connell and Wright 82, p.297]. 

The next section is a digression on results about derivations that will be required in §4.

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Section 3
\section{Derivations}
\begin{frame}[allowframebreaks]{Derivations}

In this section we study some properties of derivations that will be required in the next section. 

Let $D$ be a derivation of a $K$-algebra $S$. 

It follows from Leibniz's rule that the kernel of $D$ is a subring of $S$; it is called the {\color{red}ring of constants} of $D$. 

The derivation $D$ is {\color{red}locally nilpotent} if for every $a \in S$ there exists $k \in \mathbb{N}$ such that $D^k(a) = 0$. 

Note that the derivations $\partial_1, \ldots, \partial_n$ of $K[x_1, \ldots, x_n]$ are locally nilpotent, whilst $x_1 \partial_1$ is not.
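
Indeed (a two-line check): if $f \in K[x_1, \ldots, x_n]$ has degree $d$ in $x_1$, then $\partial_1^{d+1}(f) = 0$; on the other hand,
\begin{equation}
(x_1 \partial_1)^k(x_1) = x_1 \neq 0 \quad \text{for every } k \in \mathbb{N}.
\end{equation}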

Let $S$ be a $K$-algebra and $D$ a locally nilpotent derivation of $S$. 

Define a map $\phi: S \longrightarrow S[x]$ by the rule
\begin{equation}
\phi(a) = \sum_{n=0}^{\infty} \frac{D^n(a)}{n!}\, x^n
\end{equation}

for every $a \in S$. 

Note that $\phi(a)$ belongs to $S[x]$ because $D$ is locally nilpotent, so the sum has only finitely many non-zero terms. 

It is easy to check that $\phi$ is a ring homomorphism which satisfies
\begin{equation}
\phi \cdot D = \frac{d}{dx} \cdot \phi.
\end{equation}
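
A guiding example (with $S = K[t]$ and $D = d/dt$): here $\phi$ is the Taylor expansion, that is, the shift operator
\begin{equation}
\phi(f) = \sum_{n=0}^{\infty} \frac{f^{(n)}(t)}{n!}\, x^n = f(t + x),
\end{equation}
and the identity $\phi \cdot D = (d/dx) \cdot \phi$ says that the shift intertwines $D$ with differentiation in $x$.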

We want to prove the following proposition from [Wright 81].

\textbf{3.1 Proposition.} Let $S$ be a $K$-algebra and $D_1, \ldots, D_n$ be commuting locally nilpotent derivations of $S$. 

Suppose that there exist $t_1, \ldots, t_n \in S$ such that $D_i(t_j) = \delta_{ij}$. 

Then

(1) $S = R[t_1, \ldots, t_n]$, where $R = \bigcap_{i=1}^{n} \ker D_i$ is the common ring of constants of $D_1, \ldots, D_n$,

(2) $t_1, \ldots, t_n$ are algebraically independent over $R$,

(3) $D_i = \partial / \partial t_i$ for $i=1, \ldots, n$.

The proposition is proved by induction on $n$; it is convenient to isolate the case $n=1$ in a lemma.

\textbf{3.2 Lemma.} Let $S$ be a $K$-algebra and $D$ a locally nilpotent derivation of $S$. 

Suppose that for some $t \in S$ one has $D(t)=1$. 

Then

(1) $S = R[t]$, where $R$ is the ring of constants of $D$,

(2) $t$ is algebraically independent over $R$,

(3) $D = d/dt$.
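
The model case to keep in mind (an illustration, not part of the proof): take $S = K[x_1, \ldots, x_n]$, $D = \partial_n$ and $t = x_n$; then
\begin{equation}
R = \ker \partial_n = K[x_1, \ldots, x_{n-1}], \qquad S = R[x_n], \qquad D = d/dx_n,
\end{equation}
exactly as the lemma predicts.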

\textbf{Proof:} Put $\bar{S} = S/tS$. 

Let $\rho: S \longrightarrow \bar{S}[x]$ be the composition of the map $\phi$ defined above with the projection $S[x] \longrightarrow \bar{S}[x]$. 

We want to show that $\rho$ is an isomorphism. 

Note that $\rho(t) = x$.

To prove that $\rho$ is surjective it is enough to prove that its image contains $\bar{S}$. 

Let $a \in S$. 

Denote by $\bar{a}$ its image in $\bar{S}$. 

Since $D$ is locally nilpotent, there exists $n \in \mathbb{N}$ such that $D^k(a) = 0$ for $k > n$. 

Thus,
\begin{equation}
\rho(a) = \sum_{i=0}^{n} \frac{\overline{D^i(a)}}{i!}\, x^i.
\end{equation}

If $n=0$, then $\rho(a) = \bar{a}$. 

If $n>0$, put $a_0 = a$ and define $a_{j+1} = a_j - D^{n-j}(a_j)\,t^{n-j}/(n-j)!$, for $j=0, 1, \ldots, n-1$. 

It is easy to show, by induction on $j$, that $D^k(a_j) = 0$ for $k > n-j$ and that
\begin{equation}
\rho(a_j) = \sum_{i=0}^{n-j} \frac{\overline{D^i(a_j)}}{i!}\, x^i.
\end{equation}

Thus $\rho(a_n) = \bar{a}_n$. 

However, each $a_{j+1} - a_j$ is a multiple of $t$ and $\bar{t}=0$, so $\bar{a}_n = \bar{a}$; hence $\rho(a_n) = \bar{a}$. 

Thus $\rho$ is surjective.

Let us prove that $\rho$ is injective. 

If not, then there exists a non-zero $a \in S$ such that $\rho(a) = 0$. 

Thus $D^k(a) \in tS$ for every $k \in \mathbb{N}$. 

In particular, taking $k=0$, we have $a = a_1 \cdot t$ for some $a_1 \in S$. 

Since $\rho(t) = x$, we get $\rho(a_1) \cdot x = \rho(a) = 0$; as multiplication by $x$ is injective in $\bar{S}[x]$, it follows that $\rho(a_1) = 0$. 

Thus $a_1 \in tS$ and $a = a_2 \cdot t^2$, for some $a_2 \in S$. 

Continuing this way we conclude that $t^n$ divides $a$ for all $n \geq 0$. 

But this is impossible, unless $a=0$. 

Indeed, $\phi$ maps $t$ to $t+x$. 

Thus if $t^n$ divides $a$, we also have that $\phi(t^n) = (t+x)^n$ divides $\phi(a)$ in the polynomial ring $S[x]$. 

Hence, if $a \neq 0$, then $\phi(a) \neq 0$ (its constant term is $a$) and $\deg(\phi(a)) \geq n$ for every $n > 0$, which is clearly impossible. 

Thus $a=0$, as required.

We conclude that the homomorphism $\rho: S \longrightarrow \bar{S}[x]$ is an isomorphism. 

Since $\rho \cdot D = d/dx \cdot \rho$, we have that $R = \rho^{-1}(\bar{S})$. 

The result now follows if we recall that $\rho(t) = x$.

\textbf{Proof of Proposition 3.1:} We proceed by induction on the number $n$ of derivations. 

By Lemma 3.2, $S = R_1[t_1]$, where $R_1$ is the ring of constants of $D_1$. 

Moreover, $t_1$ is algebraically independent over $R_1$ and $D_1 = d/dt_1$. 

Since $D_1$ commutes with $D_i$ for $i>1$, we have that $D_i(R_1) \subseteq R_1$; note also that $t_2, \ldots, t_n \in R_1$, because $D_1(t_j) = \delta_{1j} = 0$ for $j > 1$. 

Thus the induction hypothesis applies to $D_2, \ldots, D_n$ on $R_1$, giving $R_1 = R[t_2, \ldots, t_n]$, and the proposition follows.

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Section 4
\section{Automorphisms}
\begin{frame}[allowframebreaks]{Automorphisms}

We now return to the setup of the Jacobian conjecture. 

Let $X = K^n$. 

The rational function field of $K[X]$ will be denoted by $K(X)$. 

Let $F: X \to X$ be a polynomial map with coordinate functions $F_1, \ldots, F_n$. 

Assume that
\begin{equation}
\Delta = \Delta F \neq 0
\end{equation}
everywhere on $X$, a condition that is weaker than the one required by the Jacobian conjecture. 

Define a map $D_i: K(X) \to K(X)$ by
\begin{equation}
D_i(g) = \Delta^{-1} \det J(F_1, \ldots, F_{i-1}, g, F_{i+1}, \ldots, F_n).
\end{equation}

It is easy to check that $D_i$ is a $K$-linear map that satisfies Leibniz's rule. 

Hence $D_i$ is a derivation of $K(X)$. 

Now let $K[X, \Delta^{-1}]$ be the $K$-subalgebra of $K(X)$ of all rational functions whose denominator is a power of $\Delta$. 

Then $D_i$ restricts to a derivation of $K[X, \Delta^{-1}]$, since
\begin{equation}
D_i(\Delta^{-1}) = -\Delta^{-2} D_i(\Delta).
\end{equation}
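
Written out for $n = 2$ (a direct unwinding of the definition):
\begin{equation}
D_1(g) = \Delta^{-1} \det \begin{pmatrix} \partial g/\partial x_1 & \partial g/\partial x_2 \\ \partial F_2/\partial x_1 & \partial F_2/\partial x_2 \end{pmatrix},
\end{equation}
so that, for instance, $D_1(F_2) = 0$ (repeated row) and $D_1(F_1) = \Delta^{-1} \Delta = 1$, in line with Lemma 4.1 below.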


\textbf{4.1 Lemma.} As derivations of $K[X, \Delta^{-1}]$ the $D_i$ satisfy:

(1) $D_i(F_j) = \delta_{ij}$.

(2) The $D_i$ commute pairwise.

\textbf{Proof:} We prove (2); (1) follows easily from properties of the determinant: when $g = F_j$ with $j \neq i$, the matrix in the definition of $D_i$ has two equal rows, so its determinant is zero, and when $g = F_i$ the matrix is $J(F)$ itself. 

Note first that $\Delta(0) \neq 0$. 

Thus $\Delta$ is invertible as a power series and $K[X, \Delta^{-1}] \subseteq K[[X]]$. 

On the other hand, $\Delta \cdot D_i$ is a derivation of $K[x_1, \ldots, x_n]$ which can be extended to a derivation on the power series ring $K[[X]] = K[[x_1, \ldots, x_n]]$; see Exercise 5.9. 

Since $\Delta$ is invertible as a power series, $D_i$ can also be extended to a derivation of $K[[X]]$.

Put $B = [D_i, D_j]$. 

We want to show that $B=0$ on $K[X, \Delta^{-1}]$. 

It is enough to show that $B=0$ on the power series ring $K[[X]]$. 

Since the commutator of two derivations is a derivation (see Exercise 5.8), we have that $B$ is a derivation of $K[[X]]$. 

Moreover $B(F_k) = D_i(\delta_{jk}) - D_j(\delta_{ik}) = 0$ for $1 \leq k \leq n$, and so $B$ is zero on the subalgebra $K[F_1, \ldots, F_n]$. 

But $F_1, \ldots, F_n$ are algebraically independent by Lemma 2.2: the injectivity of $F{\,}^\sharp$ means precisely that no non-zero polynomial vanishes at $(F_1, \ldots, F_n)$. 

Hence we may consider $B$ as a derivation on the power series ring $K[[F_1, \ldots, F_n]]$. 

By (1), $B$ is zero on $K[[F_1, \ldots, F_n]]$. 

For $1 \leq i \leq n$ let $a_i = F_i(0)$. 

The Jacobian matrices of $(F_1 - a_1, \ldots, F_n - a_n)$ and $F$ coincide. 

Since the latter is invertible in $K[[x_1, \ldots, x_n]]$, we conclude from the local inversion theorem (see Appendix 2) that
\begin{equation}
K[[x_1, \ldots, x_n]] = K[[F_1 - a_1, \ldots, F_n - a_n]] = K[[F_1, \ldots, F_n]].
\end{equation}

Thus $B$ is zero on $K[[x_1, \ldots, x_n]]$, as required.

We now return to the Jacobian conjecture. 

The next theorem is of the type: a conjecture implies a conjecture!

To simplify the proof we introduce the following notation. 

Let $a$ be an element of the Weyl algebra $A_n$. 

The map $\mathrm{ad}_a: A_n \to A_n$ is defined by
\begin{equation}
\mathrm{ad}_a(b) = [a,b].
\end{equation}

This is a $K$-linear map, but it is not a $K$-algebra homomorphism.
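
For example, in $A_1$ (writing $\partial = \partial_1$ and $x = x_1$, so that $[\partial, x] = 1$):
\begin{equation}
\mathrm{ad}_{\partial}(x^2) = 2x, \qquad \mathrm{ad}_{\partial}^{\,2}(x^2) = 2, \qquad \mathrm{ad}_{\partial}^{\,3}(x^2) = 0,
\end{equation}
which illustrates the drop in degree exploited in the proof below.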


\textbf{4.2 Theorem.} Let $F: K^n \to K^n$ be a polynomial map and assume that $\Delta F = 1$ everywhere on $K^n$. 

If every endomorphism of $A_n$ is an automorphism, then $F$ has a polynomial inverse; in other words, the Jacobian conjecture holds.

\textbf{Proof:} Since $\Delta F = 1$, it follows from Lemma 4.1 that $D_1, \ldots, D_n$ are derivations of $K[X]$ which satisfy
\begin{equation}
[D_i, F_j] = D_i(F_j) = \delta_{ij} \quad \text{and} \quad [D_i, D_j] = 0,
\end{equation}
for $1 \leq i,j \leq n$. 

By Ch.1 §3, there exists an endomorphism $\phi: A_n \to A_n$ such that $\phi(x_i) = F_i$ and $\phi(\partial_i) = D_i$, for $1 \leq i \leq n$. 

Note that for $b \in A_n$,
\begin{equation}
\deg(\mathrm{ad}_{\partial_i}(b)) = \deg[\partial_i, b] \leq \deg b - 1.
\end{equation}

Thus given $b \in A_n$, there exists $k \in \mathbb{N}$ such that $(\mathrm{ad}_{\partial_i})^k(b) = 0$. 

Since
\begin{equation}
\phi(\mathrm{ad}_{\partial_i}(b)) = [\phi(\partial_i), \phi(b)] = \mathrm{ad}_{D_i}(\phi(b)),
\end{equation}

we have that $(\mathrm{ad}_{D_i})^k(\phi(b)) = 0$. 

Now assume that $\phi$ is an automorphism; then every element of $A_n$, and in particular every $g \in K[X]$, is of the form $\phi(b)$. 

Since $(\mathrm{ad}_{D_i})^k(g) = D_i^k(g)$ for $g \in K[X]$, we conclude that $D_i$ is locally nilpotent. 

It then follows by Proposition 3.1 (with $t_i = F_i$) that $K[F_1, \ldots, F_n] = K[x_1, \ldots, x_n]$, which is the Jacobian conjecture as stated in 2.3.

Once again let us observe that it is not known whether every endomorphism of $A_n$ is an automorphism. 

This conjecture first appeared in print as 'Problème 11.1' in [Dixmier 68]. 

Note, however, that every endomorphism of $A_n$ is injective, by Corollary 2.2.2. 

Thus to prove the conjecture it is enough to show that every endomorphism of $A_n$ is surjective. 

Unfortunately this is not known even for $n=1$.


\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Section 5
\section{Exercises}
\begin{frame}[allowframebreaks]{Exercises}

\textbf{5.1} Let $F: \mathbb{R}^2 \to \mathbb{R}^2$ be the polynomial map defined by $F(x,y) = (x^3 + x, y)$. 

Show that $\Delta F \geq 1$ in $\mathbb{R}^2$, but that it is not constant. 

Show that $F$ has an inverse but that it is not polynomial.
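
A sketch of the last part (one possible route, under the stated setup): an inverse must have the form $G(u,v) = (h(u), v)$, where $h$ is the inverse of the strictly increasing function $t \mapsto t^3 + t$; if $h$ were a polynomial of degree $d$, then comparing degrees in
\begin{equation}
h(u)^3 + h(u) = u
\end{equation}
would give $3d = 1$, which is impossible.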

\newpage

\textbf{5.2} Let $g \in K[X] = K[x_1, \ldots, x_n]$. 

Show that $g(p) = 0$ for every $p \in K^n$ if and only if $g = 0$ as a polynomial.

Hint: Proceed by induction on $n$. 

Write $g$ in the form $\sum_{i=0}^{k} g_i x_n^i$, where $g_i \in K[x_1, \ldots, x_{n-1}]$. 

Suppose that there exists $q \in K^{n-1}$ such that $g_i(q) \neq 0$ for some $i$. 

Then
\begin{equation}
g(q, x_n) = \sum_{i=0}^{k} g_i(q)\, x_n^i
\end{equation}
is a non-zero polynomial in one variable with infinitely many roots, which is impossible.

\newpage

\textbf{5.3} Let $d$ be a positive integer. 

Consider the polynomial map $F: \mathbb{R}^2 \to \mathbb{R}^2$ defined by $F(x,y) = ((x-y)^d + y - 2x, (x-y)^d - x)$. 

Show that $\Delta F = 1$ everywhere in $\mathbb{R}^2$ and that $F$ has a polynomial inverse.
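
One possible way to start (a sketch; writing $(a,b)$ for coordinates on the target): note that $F_2 - F_1 = x - y$, so on the image $x - y = b - a$, and solving back gives the candidate inverse
\begin{equation}
G(a,b) = \big((b-a)^d - b,\; (b-a)^d + a - 2b\big),
\end{equation}
which one then checks to be a two-sided polynomial inverse of $F$.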

\newpage

\textbf{5.4} Let $k$ be a field of characteristic $p > 0$. 

Consider the map $F: k^n \to k^n$ defined by

\begin{equation}
F(x_1, \ldots, x_n) = (x_1 + x_1^p, x_2, \ldots, x_n).
\end{equation}

Show that $J(F)$ is the identity matrix but that $F$ cannot be invertible.

Hint: $x_1 + x_1^p$ is not an irreducible polynomial.
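
For instance (the smallest case, $p = 2$ and $k = \mathbb{F}_2$): the first coordinate map is $x_1 \mapsto x_1 + x_1^2$, and
\begin{equation}
0 + 0^2 = 0 = 1 + 1^2,
\end{equation}
so $F$ is not even injective; yet $J(F)$ is the identity, because the derivative of $x_1^p$ vanishes in characteristic $p$.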

\newpage

\textbf{5.5} Let $F: \mathbb{C}^n \to \mathbb{C}^n$ be a polynomial map. 

Suppose that the coordinate functions of $F$ have degree at most 2. 

Show that if $\Delta F \neq 0$ everywhere in $\mathbb{C}^n$, then
\begin{equation}
F(p) - F(q) = J(F)\!\left(\frac{p+q}{2}\right)(p-q).
\end{equation}

Use this to prove that $F$ must be injective.

This is enough to prove the Jacobian conjecture for quadratic maps because by [Bass, Connell and Wright 82, Theorem 2.1] an injective polynomial map of $\mathbb{C}^n$ to itself must be bijective.
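
A quick sanity check in one variable (with the hand-picked quadratic $F(x) = x^2 + x$):
\begin{equation}
F(p) - F(q) = (p + q + 1)(p - q) = F'\!\left(\frac{p+q}{2}\right)(p - q),
\end{equation}
since $F'(x) = 2x + 1$; and if $F(p) = F(q)$ with $p \neq q$, the displayed identity forces $J(F)$ to be singular at the midpoint, which is how injectivity follows.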

\newpage

\textbf{5.6} Which of the following derivations are locally nilpotent in $K[x_1, x_2]$?

(1) $x_1 \partial_1 + x_2 \partial_2$

(2) $x_1 \partial_1 + \partial_2$

\newpage

\textbf{5.7} Check in detail that the $D_i$ defined in §4 are derivations of $K(X)$.

\newpage

\textbf{5.8} Let $R$ be a commutative ring and $D, D'$ be derivations of $R$. 

Show that $[D, D']$ is a derivation of $R$.

\newpage

\textbf{5.9} Let $D$ be a derivation of $K[x_1, \ldots, x_n]$ that is zero on the constants. 

Show that:

(1) $D$ can be extended to the power series ring $K[[x_1, \ldots, x_n]]$.

(2) If $\Delta$ is a power series such that $\Delta(0) \neq 0$ then $\Delta^{-1} \cdot D$ is a derivation of the power series ring $K[[x_1, \ldots, x_n]]$.


\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\end{document}


