# Build-time arguments consumed by the FROM lines below.
# NOTE: an ARG declared before the first FROM is visible ONLY in FROM
# instructions; stages that need SPARK_VERSION redeclare it inside the stage.
# Neither has a default here, so both must be supplied via --build-arg.
ARG SPARK_VERSION
ARG BASE_OS

# Stage 1: fetch the Spark binary distribution and the tini init binary.
# This stage is discarded; only /spark and /tini are copied into the final image.
FROM registry.openanolis.cn/openanolis/anolisos:23 AS downloader
ARG SPARK_VERSION

# Name of the Spark binary distribution, e.g. spark-3.5.1-bin-hadoop3.
ARG SPARK_FILE=spark-${SPARK_VERSION}-bin-hadoop3

RUN yum install -y tar gzip

# Download and unpack in one layer:
#  -f  makes curl fail on HTTP errors (otherwise a 404 error page would be
#      saved as the .tgz and only surface later as a confusing gzip failure)
#  -m 600 caps the whole transfer at 10 minutes
# tar -xzf replaces the separate gzip -d + tar xf steps; the tarball is
# removed in the same layer once extracted.
RUN set -eux; \
    curl -f -O -m 600 https://mirrors.ustc.edu.cn/apache/spark/spark-${SPARK_VERSION}/${SPARK_FILE}.tgz; \
    tar -xzf ${SPARK_FILE}.tgz; \
    mv ${SPARK_FILE} /spark; \
    rm -f ${SPARK_FILE}.tgz

COPY tini-amd64 /
COPY tini-arm64 /

# Keep only the tini binary matching the build architecture.
# POSIX '=' is used instead of the bash-only '==' so the test also works
# when /bin/sh is not bash.
RUN set -eux; \
    if [ "$(uname -m)" = "aarch64" ]; then \
        mv tini-arm64 tini; \
    else \
        mv tini-amd64 tini; \
    fi; \
    chmod +x tini

# Stage 2: the runtime image.
FROM ${BASE_OS}

# Redeclare the build args needed inside this stage (pre-FROM ARGs are not
# visible here).
ARG SPARK_VERSION
ARG OS_SUFFIX
ARG JAVA_VERSION
# Numeric UID/GID for the unprivileged runtime user (Spark convention: 185).
ARG spark_uid=185

LABEL org.opencontainers.image.title="Spark" \
      org.opencontainers.image.version="${SPARK_VERSION}-${OS_SUFFIX}"

# key=value form -- the space-separated ENV syntax is deprecated.
# JAVA_HOME points at the alternatives symlink managed by the JDK package.
ENV SPARK_VERSION=${SPARK_VERSION}
ENV JAVA_HOME=/etc/alternatives/jre
ENV PATH=$JAVA_HOME/bin:$PATH

# Install runtime OS tools and the requested Dragonwell JDK, lay out the
# Spark directory tree, and create the unprivileged 'spark' user -- all in
# one layer so the trailing 'yum clean all' actually shrinks the image.
RUN set -eux; \
    yum install -y net-tools iputils hostname; \
    case "${JAVA_VERSION}" in \
        "8" ) \
            yum install -y java-1.8.0-alibaba-dragonwell \
    ;; \
        "11" ) \
            yum install -y java-11-alibaba-dragonwell \
    ;; \
        * ) \
            echo "unsupported java version: ${JAVA_VERSION}" >&2; exit 1 \
    ;; \
    esac; \
    # mkdir -p creates /opt/spark implicitly
    mkdir -p /opt/spark/examples /opt/spark/work-dir; \
    touch /opt/spark/RELEASE; \
    # restrict 'su' to members of the wheel group
    echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su; \
    # group-writable /etc/passwd lets the Spark entrypoint append an entry
    # when the container runs under an arbitrary UID (Spark-on-K8s pattern)
    chgrp root /etc/passwd && chmod ug+rw /etc/passwd; \
    yum clean all; \
    groupadd --system --gid=${spark_uid} spark; \
    useradd --system --uid=${spark_uid} --gid=spark spark; \
    chown -R spark:spark /opt/spark

# Copy the Spark distribution pieces from the downloader stage, owned by the
# runtime user so no follow-up chown layer is needed.
COPY --from=downloader --chown=spark:spark /spark/jars /opt/spark/jars
COPY --from=downloader --chown=spark:spark /spark/bin /opt/spark/bin
COPY --from=downloader --chown=spark:spark /spark/sbin /opt/spark/sbin
COPY --from=downloader --chown=spark:spark /spark/kubernetes/dockerfiles/spark/entrypoint.sh /opt/
COPY --from=downloader --chown=spark:spark /spark/kubernetes/dockerfiles/spark/decom.sh /opt/
COPY --from=downloader --chown=spark:spark /spark/examples /opt/spark/examples
COPY --from=downloader --chown=spark:spark /spark/kubernetes/tests /opt/spark/tests
COPY --from=downloader --chown=spark:spark /spark/data /opt/spark/data

# key=value form -- the space-separated ENV syntax is deprecated.
ENV SPARK_HOME=/opt/spark
ENV PATH=${SPARK_HOME}/bin:${PATH}

# tini was made executable in the downloader stage; COPY preserves the mode.
COPY --from=downloader /tini /usr/bin/tini

WORKDIR /opt/spark/work-dir

# Group-writable work dir (supports arbitrary-UID execution under K8s) and
# an executable decommission hook.
RUN set -eux \
    && chmod g+w /opt/spark/work-dir \
    && chmod a+x /opt/decom.sh

ENTRYPOINT [ "/opt/entrypoint.sh" ]

# Run the actual main process as the unprivileged spark user.
USER ${spark_uid}
