FROM centos:7

# MAINTAINER is deprecated (hadolint DL4000); record authorship via LABEL instead.
LABEL maintainer="sqm <sun.qingmeng@embedway.com>"

# Install all base tooling in a single layer (one conceptual step = one RUN)
# and clean the yum cache in the same layer so repo metadata does not bloat
# the image (hadolint DL3040/DL3041). Packages sorted for diffability.
RUN yum install -y \
        iproute \
        net-tools \
        openssh-clients \
        openssh-server \
        vim \
        wget \
    && yum clean all \
    && rm -rf /var/cache/yum

# Passwordless (key-based) SSH between the cluster containers: generate a
# root key pair, authorize it for logins, and relax client-side host-key
# checking so scripted ssh invocations never prompt interactively.
# Merged into one RUN — key generation and authorization are one logical step.
RUN ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa && \
    cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys && \
    echo "StrictHostKeyChecking no" >> /etc/ssh/ssh_config && \
    echo "LogLevel		ERROR" >> /etc/ssh/ssh_config && \
    echo "UserKnownHostsFile /dev/null" >> /etc/ssh/ssh_config

# NOTE(review): hard-coded root password — acceptable only for a throwaway
# local demo cluster; never push an image built with this line to a shared
# registry. Consider a build secret or disabling password auth entirely.
RUN echo "root:123" | chpasswd
WORKDIR /root

# Fetch the Hadoop, Spark and JDK tarballs in a single layer (one logical
# step; three separate RUNs just add layers).
# NOTE(review): downloads are not checksum-verified — pin SHA-256 digests if
# reproducibility matters.
RUN wget -N http://archive.apache.org/dist/hadoop/core/hadoop-2.7.2/hadoop-2.7.2.tar.gz -P tarball && \
    wget -N http://archive.apache.org/dist/spark/spark-2.4.7/spark-2.4.7-bin-hadoop2.7.tgz -P tarball && \
    wget -N https://repo.huaweicloud.com/java/jdk/8u181-b13/jdk-8u181-linux-x64.tar.gz -P tarball

# Install JDK 8u181 (quiet extraction; -v only spams the build log).
RUN tar xf tarball/jdk-8u181-linux-x64.tar.gz -C /usr/local/
# key=value ENV form — the legacy space-separated form is deprecated.
# Kept as separate ENV instructions because $JAVA_HOME is only visible to
# instructions *after* the one that sets it.
ENV JAVA_HOME=/usr/local/jdk1.8.0_181
ENV CLASSPATH=$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
ENV PATH=$PATH:$JAVA_HOME/bin

# Install Hadoop 2.7.2 behind a version-agnostic /usr/local/hadoop symlink;
# extract + link are one logical step, so one layer.
RUN tar xf tarball/hadoop-2.7.2.tar.gz -C /usr/local/ && \
    ln -snf /usr/local/hadoop-2.7.2 /usr/local/hadoop
# key=value ENV form; PATH reuses $HADOOP_HOME (set in the prior instruction)
# instead of repeating the literal path.
ENV HADOOP_HOME=/usr/local/hadoop
ENV PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin

## Create the HDFS NameNode/DataNode data directories and the Hadoop log dir.
# Single mkdir -p call: idempotent for every path (the original used plain
# mkdir for the logs dir, which fails if it already exists).
RUN mkdir -p /root/hdfs/namenode /root/hdfs/datanode $HADOOP_HOME/logs

## config hadoop
COPY config/* /usr/local/hadoop/etc/hadoop/
# Point hadoop-env.sh at the JDK installed above — reuse the build-time
# $JAVA_HOME (double quotes so it expands now) instead of duplicating the
# literal path; cap the client JVM heap at 1 GiB. The second sed stays
# single-quoted so $HADOOP_CLIENT_OPTS lands literally in the script.
RUN sed -i "/export JAVA_HOME/c export JAVA_HOME=$JAVA_HOME" $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
    sed -i '/export HADOOP_CLIENT_OPTS/c export HADOOP_CLIENT_OPTS="-Xmx1024m $HADOOP_CLIENT_OPTS"' $HADOOP_HOME/etc/hadoop/hadoop-env.sh

## add scripts
# Helper scripts go to root's home (the WORKDIR). The chmod re-asserts the
# execute bit on the Hadoop start scripts — presumably lost somewhere in the
# distribution tarball or a config overwrite; confirm whether still needed.
COPY scripts/* /root/
RUN chmod +x $HADOOP_HOME/sbin/start-dfs.sh && \
    chmod +x $HADOOP_HOME/sbin/start-yarn.sh

## format namenode
# NOTE(review): formatting at build time bakes a fixed HDFS cluster ID into
# the image; every container started from this image shares it. Confirm that
# is intended when running NameNode and DataNodes from the same image.
RUN /usr/local/hadoop/bin/hdfs namenode -format

# Add Spark 2.4.7 (hadoop2.7 build); Spark discovers the Hadoop/YARN config
# through HADOOP_CONF_DIR / YARN_CONF_DIR.
ENV HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
ENV YARN_CONF_DIR=$HADOOP_CONF_DIR
# Extract + version-agnostic symlink in one layer.
RUN tar xf tarball/spark-2.4.7-bin-hadoop2.7.tgz -C /usr/local && \
    ln -snf /usr/local/spark-2.4.7-bin-hadoop2.7 /usr/local/spark
ENV SPARK_HOME=/usr/local/spark
ENV PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin
# Ship the cluster config in a single COPY; the trailing slash makes the
# directory destination explicit.
COPY config/spark-env.sh config/slaves config/fairscheduler.xml /usr/local/spark/conf/
# Quieten Spark's console logging from INFO to WARN.
RUN cp $SPARK_HOME/conf/log4j.properties.template $SPARK_HOME/conf/log4j.properties && \
    sed -i 's/INFO/WARN/g' $SPARK_HOME/conf/log4j.properties

## Add the Delta Lake core jar and the MySQL JDBC connector to Spark's
## classpath — one logical step, one layer (was two separate RUNs).
# NOTE(review): jars are fetched without checksum verification.
RUN wget -N https://repo1.maven.org/maven2/io/delta/delta-core_2.11/0.6.0/delta-core_2.11-0.6.0.jar -P $SPARK_HOME/jars && \
    wget -N https://repo1.maven.org/maven2/mysql/mysql-connector-java/5.1.38/mysql-connector-java-5.1.38.jar -P $SPARK_HOME/jars

# systemd must run as PID 1 or `systemctl` fails with
# "Failed to get D-Bus connection: Operation not permitted".
# Shell-form `CMD init` launches via `/bin/sh -c`, so sh — not init — may be
# PID 1; exec form with the absolute path guarantees init is PID 1 and
# receives signals from `docker stop` directly.
CMD ["/usr/sbin/init"]

# Start HDFS:              start-dfs.sh                      then check master:50070
# Start the Spark cluster: sh $SPARK_HOME/sbin/start-all.sh  then check master:18080
# Test the Delta table:    sh test_delta.sh
