#!/usr/bin/env bash
#1 Install docker on CentOS
# `command -v` also finds docker installed outside /usr/bin (the original
# file-existence test did not).
if command -v docker >/dev/null 2>&1; then
    echo 'docker installed'
else
    # -y: non-interactive; without it `yum install` prompts and hangs a script
    sudo yum install -y docker.x86_64
    # sanity-check the install (`docker -h` is not a valid help flag; docker uses --help)
    docker --version

    sudo systemctl enable docker.service
fi

#2 China-local mirror sites for the docker registry
#https://yq.aliyun.com/articles/29941
#Docker registry mirror (accelerator)
#For docker version <=1.10 — legacy config, kept as a no-op here-doc comment block:
:<<EOF
sudo cp -n /lib/systemd/system/docker.service /etc/systemd/system/docker.service
sudo sed -i "s|ExecStart=/usr/bin/docker daemon|ExecStart=/usr/bin/docker daemon --registry-mirror=<your accelerate address>|g" /etc/systemd/system/docker.service
sudo systemctl daemon-reload
sudo service docker restart
EOF

#For docker version >1.10: configure the registry mirror via daemon.json
sudo mkdir -p /etc/docker
# tee (not a plain '>') so the write happens with root privileges;
# >/dev/null stops tee from echoing the config to stdout.
# NOTE(review): this OVERWRITES any existing daemon.json — merge by hand if
# the host already has one with other settings.
sudo tee /etc/docker/daemon.json >/dev/null <<-'EOF'
{
  "registry-mirrors": ["https://2mt5bmc8.mirror.aliyuncs.com"]
}
EOF
sudo systemctl daemon-reload
sudo systemctl restart docker

#3 Let a non-root user run docker
# After this completes, log out and log back in for the group change to take effect.
# The docker package usually creates the group already; `groupadd` errors out in
# that case, so only create it when missing.
getent group docker >/dev/null || sudo groupadd docker
sudo gpasswd -a "${USER}" docker
sudo systemctl restart docker


#4 Pull the hadoop image and start it following the image documentation
#docker hadoop environment
#https://cr.console.aliyun.com/?spm=5176.100239.blogcont29941.13.3cvDU5#/imageDesc/4030/detail
#See the image description page for how to enter the container
docker pull registry.cn-hangzhou.aliyuncs.com/kaibb/hadoop

#Also publish port 10000 so the HOST machine can test hiveserver2
docker run -itd --name hadoop -p 10000:10000 -h hadoop -P registry.cn-hangzhou.aliyuncs.com/kaibb/hadoop

# Format HDFS and start the daemons INSIDE the container.
# Two fixes vs the original:
#  - it used a Unicode en-dash in `–format`, which hadoop does not parse as a flag;
#  - the commands were listed after an interactive `docker exec -it hadoop bash`,
#    so as a script they would have run on the HOST after that shell exited.
# NOTE(review): assumes `start-all.sh` is on PATH in a non-interactive container
# shell — confirm against the image.
docker exec hadoop hadoop namenode -format
docker exec hadoop start-all.sh

#为了和hive单机伪分布运行模式配合起来，需要修改的地方
#http://www.aboutyun.com/thread-16507-1-1.html 用户权限
#hadoop.proxyuser.root.hosts 可加入远程主机,如： localhost,172,16.1.##
:<<EOF
hadoop/etc/hadoop/core-site.xml
<property>
      <name>hadoop.proxyuser.root.groups</name>
      <value>root</value>
      <description>Allow the superuser oozie to impersonate any members of the group group1 and group2</description>
 </property>


 <property>
      <name>hadoop.proxyuser.root.hosts</name>
      <value>*</value>
      <description>The superuser can connect only from host1 and host2 to impersonate a user</description>
  </property>
EOF
# The number of live datanodes 1 has reached the minimum number 0. In safe mode extension. Safe mode will be turned off automatically in 6 seconds.
#使用如下命令开启关闭 stop-all.sh
start-all.sh
#由于是1个节点，因此需要关闭安全模式
hadoop dfsadmin -safemode leave

#5 hive setup
#5.0 If hive's metastore uses mysql, mysql must be installed (hive's init script
#failed against mysql in this test)
#/opt/tools/apache-hive-2.1.1-bin/bin/schematool -initSchema -dbType mysql
#ubuntu — these run inside the container, which is Ubuntu-based
# Replace sources.list with the aliyun mirror in a single write
# (the original did five separate echo redirects).
cat > /etc/apt/sources.list <<'EOF'
deb http://mirrors.aliyun.com/ubuntu/ vivid main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ vivid-security main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ vivid-updates main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ vivid-proposed main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ vivid-backports main restricted universe multiverse
EOF
# can not use
:<<EOF
deb-src http://mirrors.aliyun.com/ubuntu/ vivid main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ vivid-security main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ vivid-updates main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ vivid-proposed main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ vivid-backports main restricted universe multiverse
EOF

# `apt-get update` is required after rewriting sources.list, otherwise the new
# mirror is never read; -y keeps the install non-interactive.
apt-get update
apt-get install -y mysql-server

#5.1 hive install
#http://blog.csdn.net/w12345_ww/article/details/51910030
wget https://mirrors.cnnic.cn/apache/hive/hive-2.1.1/apache-hive-2.1.1-bin.tar.gz --no-check-certificate
/opt/tools/apache-hive-2.1.1-bin/bin/schematool -initSchema -dbType derby

# Environment variables — append these to /etc/bash.bashrc for login shells.
# (The original had the bare path `/etc/bash.bashrc` on its own line, which bash
# tries to EXECUTE as a command and fails; it was meant as a marker for
# "put the following in this file".)
export JAVA_HOME=/opt/tools/jdk1.8.0_77
export HADOOP_PREFIX=/opt/tools/hadoop
export HADOOP_COMMON_HOME=/opt/tools/hadoop
export HADOOP_HDFS_HOME=/opt/tools/hadoop
export HADOOP_MAPRED_HOME=/opt/tools/hadoop
export HADOOP_YARN_HOME=/opt/tools/hadoop
export HADOOP_CONF_DIR=/opt/tools/hadoop/etc/hadoop
export YARN_CONF_DIR=$HADOOP_PREFIX/etc/hadoop
export HIVE_HOME=/opt/tools/apache-hive-2.1.1-bin
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_PREFIX/bin:$HADOOP_PREFIX/sbin:$HIVE_HOME/bin

#Run the hive server and the local client
# Two fixes vs the original:
#  - the server line started with a pasted shell prompt (`$ `), so bash tried to
#    run a command literally named `$` and hiveserver2 never started;
#  - hiveserver2 blocks in the foreground, so background it before launching beeline;
#  - beeline was invoked as `./beeline`, which only works from $HIVE_HOME/bin.
"$HIVE_HOME/bin/hiveserver2" &
sleep 10  # give hiveserver2 time to come up — adjust as needed
"$HIVE_HOME/bin/beeline" -u jdbc:hive2://localhost:10000 -n root -p root

#6 JDBC remote client
#https://mirrors.cnnic.cn/apache/hive/hive-2.1.1/apache-hive-2.1.1-bin.tar.gz — this binary release does not include the hive-jdbc jar
#Build it from source instead: mvn package -Dmaven.test.skip=true
#http://mirror.bit.edu.cn/apache/hive/hive-2.1.1/apache-hive-2.1.1-src.tar.gz
# https://cwiki.apache.org/confluence/display/Hive/HiveServer2+Clients
#https://cwiki.apache.org/confluence/display/Hive/HiveClient  standalone server mode
#Thrift IDL file for TCLIService: https://github.com/apache/hive/blob/master/service-rpc/if/TCLIService.thrift
#Connect SQL Developer to hive: https://community.hortonworks.com/articles/1887/connect-oracle-sql-developer-to-hive.html

#hivedocker: the dependency jars from http://repository.mapr.com/maven/ were broken
#hiveorg: tested with jars built from source

#Exporting/importing a CONTAINER: http://blog.csdn.net/opensure/article/details/46559071
#Container export loses the EXPOSE metadata, so the run command must be given again:
#cat hive.tar|docker import - hive
#docker run -itd --name h1 -p 10000:10000 -h hadoop -P hive /usr/sbin/sshd -D


#Exporting the IMAGE keeps everything — the customized image is directly usable on other machines
docker commit hadoop hive
docker save -o hive.tar hive
docker rmi hive
docker load -i hive.tar
#The reloaded hive image now contains all the hive customizations