# syntax=docker/dockerfile:1
# Spark standalone/local image: Hadoop + Spark (hadoop-free build) layered on a
# pre-built JDK8 + Scala 2.12 base image.
ARG J_S_V=jdk8.472-2.12.21
FROM registry.cn-hangzhou.aliyuncs.com/xiaoyilin/scala212:${J_S_V}

# Hadoop / Spark versions; defaults match the example build command below so a
# plain `docker build .` works, while --build-arg still overrides them.
ARG H_V=3.4.2
ARG S_V=3.5.7

# ADD auto-extracts the local tarballs into /usr/local/ — the one legitimate
# use of ADD over COPY. The tarballs must sit next to this Dockerfile.
ADD hadoop-${H_V}.tar.gz spark-${S_V}-bin-without-hadoop.tgz /usr/local/

WORKDIR /usr/local/

RUN mv /usr/local/hadoop-${H_V} /usr/local/hadoop${H_V} && \
    mv /usr/local/spark-${S_V}-bin-without-hadoop /usr/local/spark${S_V} && \
    # Switch apt to the Tsinghua mirror (faster in-region); debian.sources is
    # the deb822 location used by Debian 12+ bases.
    sed -i 's/deb.debian.org/mirrors.tuna.tsinghua.edu.cn/g' /etc/apt/sources.list.d/debian.sources && \
    # update + install in one layer (DL3009); no blanket `apt-get upgrade`
    # (DL3005) — pick up security fixes by bumping the base tag instead.
    # procps provides `ps`, which Spark's start/stop scripts rely on.
    apt-get update && \
    apt-get install -y --no-install-recommends procps && \
    cp /usr/local/spark${S_V}/conf/spark-env.sh.template /usr/local/spark${S_V}/conf/spark-env.sh && \
    cp /usr/local/spark${S_V}/conf/workers.template /usr/local/spark${S_V}/conf/workers && \
    # The "without-hadoop" Spark build has no Hadoop jars of its own: point
    # SPARK_DIST_CLASSPATH at the Hadoop distribution so Spark can start.
    sed -i '$a export SPARK_DIST_CLASSPATH=$(hadoop classpath)' /usr/local/spark${S_V}/conf/spark-env.sh && \
    # Drop the apt cache in the same layer that created it.
    rm -rf /var/lib/apt/lists/*

# NOTE(review): image runs as root; consider a non-root USER if local-mode
# Hadoop/Spark permissions allow it — confirm before changing.
ENV TZ=Asia/Shanghai \
    HADOOP_HOME=/usr/local/hadoop${H_V} \
    SPARK_HOME=/usr/local/spark${S_V}
# PATH must be a separate ENV: substitution only sees variables set by
# *previous* instructions, not earlier keys of the same ENV line.
ENV PATH=$PATH:$HADOOP_HOME/bin:$SPARK_HOME/bin:$SPARK_HOME/sbin


# JAVA8+Scala2.12
# docker build --build-arg J_S_V=jdk8.472-2.12.21 --build-arg H_V=3.4.2 --build-arg S_V=3.5.7 -t registry.cn-hangzhou.aliyuncs.com/xiaoyilin/spark-local:3.5.7-h3.4.2 .
# docker push registry.cn-hangzhou.aliyuncs.com/xiaoyilin/spark-local:3.5.7-h3.4.2
# docker run --rm -it -p 4040:4040 registry.cn-hangzhou.aliyuncs.com/xiaoyilin/spark-local:3.5.7-h3.4.2 bash
# spark-shell
# http://localhost:4040