FROM centos:7
# Image maintainer metadata.
# MAINTAINER is deprecated (hadolint DL4000) — LABEL is the supported form.
LABEL maintainer="zane_feng"

# Replace the stock repos with the Aliyun mirror, then install the SSH
# server + client.  Repo swap, cache refresh, install and cleanup happen
# in ONE layer so no stale metadata or package cache is baked into the
# image (hadolint DL3009/DL3040).
RUN mkdir /opt/centos-yum.bak \
 && mv /etc/yum.repos.d/* /opt/centos-yum.bak/ \
 && curl -o /etc/yum.repos.d/CentOS-Base.repo https://mirrors.aliyun.com/repo/Centos-7.repo \
 && yum clean all \
 && yum makecache \
 && yum -y install \
        openssh-clients \
        openssh-server \
 && yum clean all \
 && rm -rf /var/cache/yum
# Disable PAM so sshd login works without a full PAM session stack
RUN sed -i 's/UsePAM yes/UsePAM no/g' /etc/ssh/sshd_config
# Root password and sudo rule.
# NOTE(review): "1" is a trivially weak password — acceptable only for a
# local dev/teaching cluster, never for anything reachable from outside.
RUN echo "root:1"  | chpasswd
RUN echo "root ALL=(ALL)  ALL"  >> /etc/sudoers
# Pre-generate host keys.  -N '' (empty passphrase) keeps ssh-keygen from
# prompting for one, which would fail in a non-interactive docker build.
# sshd also requires /var/run/sshd to exist before it will start.
RUN ssh-keygen -t dsa -N '' -f /etc/ssh/ssh_host_dsa_key \
 && ssh-keygen -t rsa -N '' -f /etc/ssh/ssh_host_rsa_key \
 && mkdir /var/run/sshd
# Service ports: 22 = sshd, 6655 = cluster coordination port.
# EXPOSE is documentation only — it does not publish the ports.
EXPOSE 22 6655
# JDK tarball copied straight into the image; ADD auto-extracts local tar
# archives, producing /usr/local/jdk1.8.0_202, which is renamed to a
# version-neutral path.
ADD jdk-8u202-linux-x64.tar.gz /usr/local
RUN mv /usr/local/jdk1.8.0_202 /usr/local/jdk8
# key=value form — the legacy space-separated ENV syntax is deprecated
# (BuildKit check LegacyKeyValueFormat).
ENV JAVA_HOME=/usr/local/jdk8
ENV PATH=$JAVA_HOME/bin:$PATH

# Hadoop tarball copied straight into the image; ADD auto-extracts local
# tar archives, so /usr/local/hadoop-3.3.1 exists after this step and is
# renamed to a version-neutral path.
ADD hadoop-3.3.1.tar.gz /usr/local
RUN mv /usr/local/hadoop-3.3.1 /usr/local/hadoop

# Tcl/expect source archives; ADD auto-extracts them under /usr/local/src.
# NOTE(review): nothing in this Dockerfile compiles them — presumably a
# startup script builds expect into /usr/expect (PATH later references
# /usr/expect/bin/); confirm against entrypoint.sh / starthadoop.sh.
ADD tcl8.6.12-src.tar.gz /usr/local/src
ADD expect5.45.4.tar.gz /usr/local/src

# Helper jar used to connect/drive the containers at startup
COPY hadoopDocker-start.jar /usr/local/src

# Startup helper scripts.  These are plain local files, so COPY (not ADD)
# is the correct instruction (hadolint DL3020) — ADD's extra tar/URL
# behavior is not wanted here.
COPY ssh-copy.exp /usr/local/src
COPY sshKeygen.exp /usr/local/src
COPY starthadoop.sh /usr/local/src

# Hadoop environment.  key=value form replaces the deprecated
# space-separated ENV syntax; the two consecutive PATH assignments are
# merged into one equivalent expansion ($HADOOP_HOME/bin, then the old
# PATH, then /usr/expect/bin/ where expect is installed at runtime).
ENV HADOOP_HOME=/usr/local/hadoop
ENV PATH=$HADOOP_HOME/bin:$PATH:/usr/expect/bin/

# Overwrite the default Hadoop configuration with the cluster config.
# Plain local files — COPY, not ADD (hadolint DL3020); COPY supports the
# same glob expansion.
COPY hadoop_config/* /usr/local/hadoop/etc/hadoop/

# Make the JDK visible to Hadoop's own scripts: hadoop-env.sh is sourced
# by every Hadoop daemon, which does not inherit the image's ENV PATH
# reliably over ssh, so JAVA_HOME must be set explicitly here.
RUN echo 'export JAVA_HOME=/usr/local/jdk8' >> /usr/local/hadoop/etc/hadoop/hadoop-env.sh

# entrypoint.sh configures /etc/hosts and runs the remaining startup
# scripts.  Plain local file — COPY, not ADD (hadolint DL3020).
# Exec-form ENTRYPOINT so the script is PID 1 and receives SIGTERM.
COPY entrypoint.sh /
RUN chmod +x /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

# Patch start-dfs.sh: insert the *_USER variables Hadoop 3.x requires
# when its daemons run as root (otherwise start-dfs.sh aborts with
# "Attempting to operate on hdfs namenode as root").
# NOTE(review): the appended lines carry the literal leading tabs from
# this Dockerfile; harmless for shell variable assignments, but confirm
# the resulting script looks as intended on CentOS 7's GNU sed.
RUN sed -i '/#!\/usr\/bin\/env bash/a\
				HDFS_DATANODE_USER=root\
				HDFS_DATANODE_SECURE_USER=hdfs\
				HDFS_NAMENODE_USER=root\
				HDFS_SECONDARYNAMENODE_USER=root\n' /usr/local/hadoop/sbin/start-dfs.sh

# Patch stop-dfs.sh with the same *_USER variables as start-dfs.sh so
# shutdown as root is also permitted by Hadoop 3.x.
RUN sed -i '/#!\/usr\/bin\/env bash/a\
				HDFS_DATANODE_USER=root\
				HDFS_DATANODE_SECURE_USER=hdfs\
				HDFS_NAMENODE_USER=root\
				HDFS_SECONDARYNAMENODE_USER=root\n' /usr/local/hadoop/sbin/stop-dfs.sh
				
# Patch start-yarn.sh: YARN daemons likewise refuse to start as root
# unless these *_USER variables are defined.
RUN sed -i '/#!\/usr\/bin\/env bash/a\
				YARN_RESOURCEMANAGER_USER=root\
				HADOOP_SECURE_DN_USER=yarn\
				YARN_NODEMANAGER_USER=root\n' /usr/local/hadoop/sbin/start-yarn.sh
				
# Patch stop-yarn.sh with the same *_USER variables as start-yarn.sh so
# shutdown as root is also permitted.
RUN sed -i '/#!\/usr\/bin\/env bash/a\
				YARN_RESOURCEMANAGER_USER=root\
				HADOOP_SECURE_DN_USER=yarn\
				YARN_NODEMANAGER_USER=root\n' /usr/local/hadoop/sbin/stop-yarn.sh
				
# Replace the default "localhost" workers list with the DataNode hosts.
# One RUN (instead of two echo layers) writes the identical file content
# atomically and saves a layer.
RUN printf 'hadoop2\nhadoop3\n' > /usr/local/hadoop/etc/hadoop/workers


