# Pin the base image to a real, explicit tag — "ubuntu:lin" is not a valid tag
# and the build fails at FROM. 20.04 matches the 2021-era toolchain below.
# NOTE(review): confirm the intended Ubuntu release with the original author.
FROM ubuntu:20.04

# MAINTAINER is deprecated (hadolint DL4000); use a LABEL instead.
LABEL maintainer="huzy yz271544@qq.com"

# Build timestamp (key=value form; legacy space-separated ENV is deprecated).
ENV BUILD_ON=2021-01-05

# Stage build-time config files into /tmp; they are moved to their final
# locations by later RUN steps.
COPY config /tmp
#RUN mv /tmp/apt.conf /etc/apt/
# Install the pip mirror/index config for root. One RUN instead of two keeps
# this logically-single step in a single layer.
RUN mkdir -p ~/.pip/ && mv /tmp/pip.conf ~/.pip/pip.conf


# apt-get update MUST run in the same layer as install: on a fresh Ubuntu base
# the package lists are empty, so a bare "apt-get install" fails outright (and
# a split update/install pair reuses stale caches — hadolint DL3009).
# python3 + pip are required by the pip step below (the base image ships
# neither), and openssh-server/client are required by the ssh-keygen and
# "/etc/init.d/ssh start" steps further down — none were installed elsewhere.
RUN apt-get update && apt-get install -y --no-install-recommends \
        net-tools \
        openssh-client \
        openssh-server \
        python3 \
        python3-distutils \
        python3-pip \
        python3-setuptools \
        vim \
    && rm -rf /var/lib/apt/lists/*

# --no-cache-dir keeps pip's download cache out of the image layer.
# "sklearn" is a deprecated dummy package name — the real package is
# "scikit-learn" (PyPI has rejected installs of "sklearn" since 2023).
# NOTE(review): the original passed a proxy URL with embedded credentials
# (--proxy http://root:...@192.168.0.4:7890/) — credentials on a RUN line leak
# into image history; supply proxies via build args or BuildKit secrets.
RUN pip3 install --no-cache-dir \
        gensim \
        matplotlib \
        numpy \
        pandas \
        scikit-learn \
        scipy \
        seaborn \
        tensorflow
# JDK 8 (ADD auto-extracts local tarballs — its one legitimate use over COPY;
# the same applies to every archive below)
ADD ./software/jdk-8u271-linux-x64.tar.gz /usr/local/
# Hadoop
ADD ./software/hadoop-3.2.1.tar.gz /usr/local
# Scala
ADD ./software/scala-2.11.12.tgz /usr/local
# Spark
ADD ./software/spark-3.0.1-bin-hadoop3.2.tgz /usr/local
# Zeppelin
ADD ./software/zeppelin-0.9.0-bin-all.tgz /usr/local
# MySQL, renamed to a version-independent path
ADD ./software/mysql-5.5.49-linux2.6-x86_64.tar.gz /usr/local
RUN mv /usr/local/mysql-5.5.49-linux2.6-x86_64 /usr/local/mysql
ENV MYSQL_HOME=/usr/local/mysql
# DataX
ADD ./software/datax.tar.gz /usr/local
ENV DATAX_HOME=/usr/local/datax

# Hive
ADD ./software/apache-hive-3.1.2-bin.tar.gz /usr/local
ENV HIVE_HOME=/usr/local/apache-hive-3.1.2-bin
# Point Hive at the Hadoop install. Plain ">>" — the original's "| cat >>" was
# a useless use of cat with identical effect.
RUN echo "HADOOP_HOME=/usr/local/hadoop-3.2.1" >> /usr/local/apache-hive-3.1.2-bin/conf/hive-env.sh
# MySQL JDBC driver for the Hive metastore; Spark needs the same driver on its
# classpath to reach the metastore, so copy it into Spark's jars too.
ADD ./software/mysql-connector-java-5.1.48-bin.jar /usr/local/apache-hive-3.1.2-bin/lib
RUN cp /usr/local/apache-hive-3.1.2-bin/lib/mysql-connector-java-5.1.48-bin.jar /usr/local/spark-3.0.1-bin-hadoop3.2/jars
# Hive 3.1.2 bundles guava 19, which clashes with Hadoop 3.2's guava 27 —
# swap in Hadoop's copy. Remove + copy is one logical step, so one layer.
RUN rm -f /usr/local/apache-hive-3.1.2-bin/lib/guava-19.0.jar && \
    cp /usr/local/hadoop-3.2.1/share/hadoop/common/lib/guava-27.0-jre.jar /usr/local/apache-hive-3.1.2-bin/lib/guava-27.0-jre.jar

# Tool home directories, grouped in one instruction (key=value form; the
# legacy space-separated ENV syntax is deprecated). None of these reference
# each other, so a single ENV is safe.
ENV JAVA_HOME=/usr/local/jdk1.8.0_271 \
    HADOOP_HOME=/usr/local/hadoop-3.2.1 \
    SCALA_HOME=/usr/local/scala-2.11.12 \
    SPARK_HOME=/usr/local/spark-3.0.1-bin-hadoop3.2 \
    ZEPPELIN_HOME=/usr/local/zeppelin-0.9.0-bin-all
# PATH must be a separate ENV so the *_HOME values above are visible to it.
# NOTE(review): dt.jar/tools.jar are jar files and belong on CLASSPATH, not
# PATH — kept here only to preserve the original behavior; confirm and remove.
ENV PATH=$HIVE_HOME/bin:$MYSQL_HOME/bin:$SCALA_HOME/bin:$SPARK_HOME/bin:$ZEPPELIN_HOME/bin:$HADOOP_HOME/bin:$JAVA_HOME/bin:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$PATH

# Generate a passwordless RSA key pair for root and authorize it against
# itself, enabling non-interactive ssh between containers that share this
# image (Hadoop's start scripts ssh to the workers).
# NOTE(review): requires openssh-client, and ssh-keygen -f fails if ~/.ssh
# does not already exist — confirm an earlier step (or the base image)
# creates /root/.ssh.
RUN ssh-keygen -t rsa -f ~/.ssh/id_rsa -P '' && \
    cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys && \
    chmod 600 ~/.ssh/authorized_keys

# Re-stage the config directory (earlier RUN steps consumed files from /tmp).
COPY config /tmp
# Move every config file to its final location in a single layer.
# Fixed: the original had trailing whitespace after the backslash on two
# continuation lines (spark-env.sh and hdfs-site.xml), which breaks the line
# continuation on strict builders, and mixed tab/space indentation.
# The hadoop3.0 dfs dirs are presumably the paths named in hdfs-site.xml —
# TODO confirm, since the install dir is hadoop-3.2.1.
RUN mv /tmp/ssh_config ~/.ssh/config && \
    mv /tmp/profile /etc/profile && \
    mv /tmp/masters $SPARK_HOME/conf/masters && \
    cp /tmp/slaves $SPARK_HOME/conf/ && \
    mv /tmp/spark-defaults.conf $SPARK_HOME/conf/spark-defaults.conf && \
    mv /tmp/spark-env.sh $SPARK_HOME/conf/spark-env.sh && \
    mv /tmp/zeppelin-env.sh $ZEPPELIN_HOME/conf/zeppelin-env.sh && \
    mv /tmp/zeppelin-site.xml $ZEPPELIN_HOME/conf/zeppelin-site.xml && \
    cp /tmp/hive-site.xml $SPARK_HOME/conf/hive-site.xml && \
    mv /tmp/hive-site.xml $HIVE_HOME/conf/hive-site.xml && \
    mv /tmp/start-dfs.sh $HADOOP_HOME/sbin/ && \
    mv /tmp/stop-dfs.sh $HADOOP_HOME/sbin/ && \
    mv /tmp/start-yarn.sh $HADOOP_HOME/sbin/ && \
    mv /tmp/stop-yarn.sh $HADOOP_HOME/sbin/ && \
    mv /tmp/hadoop-env.sh $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
    mv /tmp/hdfs-site.xml $HADOOP_HOME/etc/hadoop/hdfs-site.xml && \
    mv /tmp/core-site.xml $HADOOP_HOME/etc/hadoop/core-site.xml && \
    mv /tmp/yarn-site.xml $HADOOP_HOME/etc/hadoop/yarn-site.xml && \
    mv /tmp/mapred-site.xml $HADOOP_HOME/etc/hadoop/mapred-site.xml && \
    mv /tmp/workers $HADOOP_HOME/etc/hadoop/workers && \
    mv /tmp/start-hadoop.sh ~/start-hadoop.sh && \
    mkdir -p /usr/local/hadoop3.0/dfs/data && \
    mkdir -p /usr/local/hadoop3.0/dfs/name && \
    mv /tmp/init_mysql.sh ~/init_mysql.sh && chmod 700 ~/init_mysql.sh && \
    mv /tmp/init_hive.sh ~/init_hive.sh && chmod 700 ~/init_hive.sh && \
    mv /tmp/restart-hadoop.sh ~/restart-hadoop.sh && chmod 700 ~/restart-hadoop.sh && \
    mv /tmp/zeppelin-daemon.sh ~/zeppelin-daemon.sh && chmod 700 ~/zeppelin-daemon.sh && \
    mv /tmp/datax.py $DATAX_HOME/bin/datax.py && \
    mv /tmp/dxprof.py $DATAX_HOME/bin/dxprof.py && \
    mv /tmp/perftrace.py $DATAX_HOME/bin/perftrace.py && \
    mv /tmp/my.cnf /etc/my.cnf
	

# mv /tmp/slaves $HADOOP_HOME/etc/hadoop/slaves && \
# Directories Zeppelin expects (configured in zeppelin-env.sh). -p makes the
# step idempotent and creates any missing parents; one command, one layer.
RUN mkdir -p /var/log/zeppelin /var/run/zeppelin /var/tmp/zeppelin

# Build-time sanity check that JAVA_HOME is set.
RUN echo $JAVA_HOME

# Work in root's home, where the helper scripts were placed above.
WORKDIR /root

# NOTE(review): starting sshd in a RUN only affects the temporary build
# container — the daemon does NOT survive into the runtime image. Start it
# from the container entrypoint/start script instead. Kept for parity with
# the original.
RUN /etc/init.d/ssh start

# Make the cluster bootstrap script executable.
RUN chmod 700 start-hadoop.sh

# WARNING(security): hard-coded root password baked into the image and visible
# in `docker history`. Replace with a build secret or runtime provisioning.
RUN echo "root:555555" | chpasswd

# Exec-form default command; services are started manually via the scripts.
CMD ["/bin/bash"]
