# Spark version is declared before the first FROM so it can parameterize
# both stages; it must be redeclared inside each stage to be usable there.
ARG SPARK_VERSION=3.3.2

# Uppercase AS keeps the stage alias consistent with instruction casing
# (BuildKit check: FromAsCasing).
FROM alibaba-cloud-linux-3-registry.cn-hangzhou.cr.aliyuncs.com/alinux3/alinux3 AS downloader
ARG SPARK_VERSION

# Basename of the Spark binary distribution archive to fetch.
ARG SPARK_FILE=spark-${SPARK_VERSION}-bin-hadoop3

# Fetch and unpack the Spark distribution in a single layer.
# -f makes curl fail on HTTP errors instead of saving an HTML error page
# (which the original would then have tried to gunzip); -m 600 caps the
# download at 10 minutes. gzip is installed explicitly because tar's -z
# shells out to it. The tarball is removed in the same layer; this stage
# is discarded anyway, but it keeps the build context lean.
RUN set -eux; \
    yum install -y tar gzip; \
    curl -f -O -m 600 https://mirrors.ustc.edu.cn/apache/spark/spark-${SPARK_VERSION}/${SPARK_FILE}.tgz; \
    tar xzf ${SPARK_FILE}.tgz; \
    rm -f ${SPARK_FILE}.tgz; \
    mv ${SPARK_FILE} spark

# Ship both tini builds from the build context and keep only the one that
# matches the architecture the image is being built on.
COPY tini-amd64 /
COPY tini-arm64 /

# POSIX test(1) compares strings with '='; '==' is a bashism that the
# default /bin/sh build shell is not guaranteed to accept.
RUN set -eux; \
    if [ "$(uname -m)" = "aarch64" ]; then \
        mv tini-arm64 tini; \
        rm -f tini-amd64; \
    else \
        mv tini-amd64 tini; \
        rm -f tini-arm64; \
    fi

# Final (runtime) stage: same minimal Alibaba Cloud Linux 3 base.
FROM alibaba-cloud-linux-3-registry.cn-hangzhou.cr.aliyuncs.com/alinux3/alinux3
ARG SPARK_VERSION

# key=value form; the legacy space-separated ENV syntax is deprecated
# (BuildKit check: LegacyKeyValueFormat).
ENV SPARK_VERSION=${SPARK_VERSION}

LABEL org.opencontainers.image.title="Spark" \
      org.opencontainers.image.version="${SPARK_VERSION}-3"

# Install the Java 8 runtime (Alibaba Dragonwell) plus basic networking
# utilities, create the Spark directory layout, and relax /etc/passwd
# permissions — presumably so a runtime-assigned non-root UID can register
# itself (OpenShift-style arbitrary UIDs); confirm against entrypoint.sh.
RUN set -eux; \
    yum install -y java-1.8.0-alibaba-dragonwell net-tools iputils hostname; \
    # Alternative JDK, kept for reference:
    # yum install -y java-11-alibaba-dragonwell ; \
    mkdir -p /opt/spark; \
    mkdir -p /opt/spark/examples; \
    mkdir -p /opt/spark/work-dir; \
    # Empty marker file at the distribution root.
    touch /opt/spark/RELEASE; \
    # Restrict `su` to members of the wheel group.
    echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su; \
    chgrp root /etc/passwd && chmod ug+rw /etc/passwd; \
    # Drop yum metadata in the same layer to keep the image small.
    yum clean all

# Copy only the runtime pieces of the unpacked distribution from the
# downloader stage: jars, launcher scripts, the Kubernetes entrypoint and
# decommission scripts, plus examples/tests/data for smoke-testing.
COPY --from=downloader /spark/jars /opt/spark/jars
COPY --from=downloader /spark/bin /opt/spark/bin
COPY --from=downloader /spark/sbin /opt/spark/sbin
COPY --from=downloader /spark/kubernetes/dockerfiles/spark/entrypoint.sh /opt/
COPY --from=downloader /spark/kubernetes/dockerfiles/spark/decom.sh /opt/
COPY --from=downloader /spark/examples /opt/spark/examples
COPY --from=downloader /spark/kubernetes/tests /opt/spark/tests
COPY --from=downloader /spark/data /opt/spark/data

# key=value form; the legacy space-separated ENV syntax is deprecated.
ENV SPARK_HOME=/opt/spark

WORKDIR /opt/spark/work-dir

# One layer for both permission fixes: group-writable work dir and an
# executable decommission script.
RUN chmod g+w /opt/spark/work-dir && \
    chmod a+x /opt/decom.sh

COPY --from=downloader /tini /usr/bin/tini
# COPY preserves the source file's mode, which may lack +x — make sure
# tini is executable regardless of how it was checked into the context.
RUN chmod a+x /usr/bin/tini

ENTRYPOINT [ "/opt/entrypoint.sh" ]

# spark_uid was referenced below but never declared, so ${spark_uid}
# expanded to an empty string and the image had no effective USER.
# Declare it with the conventional Spark UID (185) so the main process
# runs as non-root by default; override with --build-arg spark_uid=...
ARG spark_uid=185

# Specify the User that the actual main process will run as
USER ${spark_uid}
