# Base: community image providing Alpine + JDK 8 + Python 3.
# NOTE(review): unverified-publisher image — consider pinning by digest
# (@sha256:...) for reproducibility and supply-chain safety.
FROM wuhongbo7/base:alpine-jdk8-py3

# Create a dedicated system group/user "rainb" with fixed GID/UID 70
# (stable numeric IDs help runtimes verify non-root execution).
# NOTE(review): no USER directive appears later in this file, so the
# container still runs as root — confirm entrypoint drops privileges.
# NOTE(review): shell is set to /bin/bash, but Alpine bases often ship
# only /bin/sh — verify bash exists in the base image.
RUN addgroup -g 70 -S rainb \
  && adduser -u 70 -S -D -G rainb -s /bin/bash rainb

# Build-time helpers: entrypoint.sh is executed by the RUN step below
# during the image build, not at container start.
COPY setConf.py conf.properties entrypoint.sh /tmp/
# ADD auto-extracts local tarballs (presumably the Hadoop and Hive
# distributions matching the versions in ENV below) into /opt/ —
# this is the legitimate use of ADD over COPY.
ADD *.tar.gz /opt/

# Liveness probe against the HDFS NameNode web UI on 9870
# (requires curl to be present in the image).
HEALTHCHECK --start-period=5s --retries=3 --interval=30s --timeout=30s \
  CMD curl -f http://localhost:9870/ || exit 1

# Environment setup
# Component versions — bump these (and the matching tarballs) to upgrade.
ENV HADOOP_VERSION=3.1.4 \
  HIVE_VERSION=3.1.2
# Install roots; declared in a second ENV so ${HADOOP_VERSION}/${HIVE_VERSION}
# from the previous instruction are already resolvable.
ENV HADOOP_HOME=/opt/hadoop-${HADOOP_VERSION} \
  HIVE_HOME=/opt/apache-hive-${HIVE_VERSION}-bin
# Config dirs, mirror download URLs (presumably consumed by /tmp/entrypoint.sh
# in the build-time RUN below — they are not referenced elsewhere in this
# file; verify), and PATH extended with the Hadoop/Hive binaries.
ENV HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop \
  HIVE_CONF_DIR=${HIVE_HOME}/conf \
  HADOOP_URL=https://mirrors.tuna.tsinghua.edu.cn/apache/hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz \
  HIVE_URL=https://mirrors.tuna.tsinghua.edu.cn/apache/hive/hive-${HIVE_VERSION}/apache-hive-${HIVE_VERSION}-bin.tar.gz \
  PATH=${PATH}:${HADOOP_HOME}/bin:${HIVE_HOME}/bin

# Document the service ports. EXPOSE takes only "port[/protocol]" — the
# previous "host:container" mapping syntax is invalid here (port publishing
# is done at run time with `docker run -p`). EXPOSE itself publishes nothing.
# 9870/9864: HDFS NameNode/DataNode UIs; 8088/8042: YARN RM/NM UIs;
# 19888: MapReduce JobHistory; 10000: HiveServer2; 9083: Hive Metastore.
EXPOSE 9870 9864 8088 8042 19888 10000 9083

# Run the setup script at BUILD time (despite its name, it is not the
# container entrypoint). Presumably it configures Hadoop/Hive using the
# ENV values above — confirm the script is executable, succeeds without
# network access assumptions, and cleans up /tmp in this same layer.
RUN /tmp/entrypoint.sh

# Start Hadoop (left disabled). NOTE: JSON exec form does not run a shell,
# so the "&&" chains below would be passed as a single literal argument and
# fail as written — use shell form or a supervisor/wrapper script instead.
# ENTRYPOINT [ "hdfs namenode && hdfs datanode && yarn resourcemanager && yarn nodemanager && mapred historyserver" ] 
# CMD [ "bash && hdfs namenode && hdfs datanode && hive" ]