#!/bin/bash
# 1. Configure passwordless SSH login
ssh_login(){
	# Configure passwordless SSH to the target host using ssh-copy-id,
	# driving the interactive prompts with expect.
	# Args: $1 - target host (default 192.168.93.128)
	#       $2 - login password (default 123456)
	# BUGFIX: yum without -y prompts for confirmation and hangs an
	# unattended run.
	yum install -y expect
	echo "Press Enter 3 times to accept the key defaults"
	ssh-keygen -t rsa   # generate an RSA key pair (3x Enter for defaults)
	# NOTE(review): a hardcoded password in a script is a security risk —
	# prefer passing it in or reading it from a protected file.
	local servers="${1:-192.168.93.128}"   # host to configure
	local password="${2:-123456}"          # login password for that host

	expect -c "set timeout -1;
    spawn ssh-copy-id -p 22 $servers;
    expect {
            *(yes/no)* {send -- yes\r;exp_continue;}
            *password:* {send -- $password\r;exp_continue;}
            eof        {exit 0;}
    }";
}

# 2. Disable the firewall
disable_firewalld(){
	# Stop firewalld immediately, prevent it from starting at boot,
	# then print its status as confirmation.
	local action
	for action in stop disable status; do
		systemctl "$action" firewalld
	done
}

# 3. Change the hostname
change_hostname(){
	# Set the machine hostname and print the result for verification.
	# Args: $1 - new hostname (default "spark1", the original behavior)
	local new_name="${1:-spark1}"
	hostnamectl set-hostname "$new_name"
	cat /etc/hostname
}

# 4. Install JDK
install_jdk(){
	# Unpack the JDK under /opt, register JAVA_HOME/PATH in
	# ~/.bash_profile, and verify with `java -version`.
	# BUGFIX: bail out if the archive is missing or corrupt.
	tar -zxvf jdk/jdk-8u191-linux-x64.tar.gz -C /opt || return 1

	# BUGFIX: append the env block only once so re-running the installer
	# does not keep duplicating PATH entries in ~/.bash_profile.
	if ! grep -q 'JAVA_HOME=/opt/jdk1.8.0_191' ~/.bash_profile 2>/dev/null; then
		cat >> ~/.bash_profile << EOF

# java environment variables
export JAVA_HOME=/opt/jdk1.8.0_191
export PATH=\$JAVA_HOME/bin:\$JAVA_HOME/jre/bin:\$PATH
EOF
	fi
	source ~/.bash_profile
	java -version
}

# 5. Install ZooKeeper (was mis-numbered as a second "4")
install_zookeeper(){
	# Unpack ZooKeeper under /opt, register ZK_HOME/PATH, install the
	# prepared zoo.cfg, start the server, and list JVM processes.
	# BUGFIX: bail out if the archive is missing or corrupt.
	tar -zxvf zookeeper/apache-zookeeper-3.5.5-bin.tar.gz -C /opt || return 1

	# BUGFIX: append the env block only once so re-running the installer
	# does not keep duplicating PATH entries in ~/.bash_profile.
	if ! grep -q 'ZK_HOME=/opt/apache-zookeeper-3.5.5-bin' ~/.bash_profile 2>/dev/null; then
		cat >> ~/.bash_profile << EOF

# zookeeper 
export ZK_HOME=/opt/apache-zookeeper-3.5.5-bin
export PATH=\$ZK_HOME/bin:\$PATH
EOF
	fi
	source ~/.bash_profile
	cp zookeeper/zoo.cfg /opt/apache-zookeeper-3.5.5-bin/conf
	zkServer.sh start
	jps
}

# 6. Install Hadoop
install_hadoop(){
	# Unpack Hadoop under /opt, register HADOOP_HOME/PATH, install all
	# prepared config files, format the NameNode, and start HDFS + YARN.
	# BUGFIX: bail out if the archive is missing or corrupt.
	tar -zxvf hadoop/hadoop-2.7.3.tar.gz -C /opt || return 1

	# BUGFIX: append the env block only once so re-running the installer
	# does not keep duplicating PATH entries in ~/.bash_profile.
	if ! grep -q 'HADOOP_HOME=/opt/hadoop-2.7.3' ~/.bash_profile 2>/dev/null; then
		cat >> ~/.bash_profile << EOF

# hadoop
export HADOOP_HOME=/opt/hadoop-2.7.3
export PATH=\$HADOOP_HOME/bin:\$HADOOP_HOME/sbin:\$PATH
EOF
	fi
	source ~/.bash_profile
	# Install all configuration files before formatting/starting anything
	# (originally mapred/yarn configs were copied after the format).
	local conf
	for conf in hadoop-env.sh hdfs-site.xml core-site.xml slaves \
			mapred-site.xml yarn-site.xml; do
		cp "hadoop/$conf" /opt/hadoop-2.7.3/etc/hadoop
	done
	# Format HDFS (fixed the redundant "bin/./" path segment).
	/opt/hadoop-2.7.3/bin/hdfs namenode -format
	start-dfs.sh
	start-yarn.sh
	jps
}

# 7. Install HBase
install_hbase(){
	# Unpack HBase under /opt, register HBASE_HOME/PATH in
	# ~/.bash_profile, install the prepared configs, and start the cluster.
	tar -zxvf hbase/hbase-1.3.1-bin.tar.gz -C /opt
	# Quoted delimiter: everything below is appended literally
	# (identical bytes to the escaped-\$ form).
	cat >> ~/.bash_profile << 'EOF'

# hbase
export HBASE_HOME=/opt/hbase-1.3.1
export PATH=$HBASE_HOME/bin:$PATH
EOF
	source ~/.bash_profile
	local cfg
	for cfg in hbase-env.sh hbase-site.xml regionservers; do
		cp "hbase/$cfg" /opt/hbase-1.3.1/conf
	done
	start-hbase.sh
}

# 8. Install Spark
install_spark(){
	# Unpack Spark under /opt and register SPARK_HOME/PATH in
	# ~/.bash_profile (appended content is byte-identical to before).
	tar -zxvf spark/spark-2.2.0-bin-2.7.3.tgz -C /opt
	{
		printf '\n# spark\n'
		printf 'export SPARK_HOME=/opt/spark-2.2.0-bin-2.7.3\n'
		printf 'export PATH=$SPARK_HOME/bin:$PATH\n'
	} >> ~/.bash_profile
	source ~/.bash_profile
}

# 9. Install MySQL
install_mysql(){
	# Unpack MySQL into /opt/mysql-5.7.28, register MYSQL_HOME/PATH,
	# install my.cnf, and initialize the data directory.
	tar -zxvf mysql/mysql-5.7.28-linux-glibc2.12-x86_64.tar.gz || return 1
	mv mysql-5.7.28-linux-glibc2.12-x86_64 /opt/mysql-5.7.28
	cat >> ~/.bash_profile << EOF

# mysql
export MYSQL_HOME=/opt/mysql-5.7.28
export PATH=\$MYSQL_HOME/bin:\$PATH
EOF
	source ~/.bash_profile

	# -p: do not fail if the directory already exists (re-run safe).
	mkdir -p /opt/mysql-5.7.28/data
	# Pipe "yes" so an interactive cp alias (cp -i) cannot stall the script.
	echo yes|cp mysql/my.cnf /etc/my.cnf

	# BUGFIX: mysqld lives in bin/ (original path "/opt/mysql-5.7.28/./mysqld"
	# does not exist), and --basedir must match the actual install location
	# (/opt/mysql-5.7.28, not /opt/mysql).
	/opt/mysql-5.7.28/bin/mysqld --initialize --user=mysql --basedir=/opt/mysql-5.7.28 --datadir=/opt/mysql-5.7.28/data
}

# 10. Install Kafka
install_kafka(){
	# Unpack Kafka under /opt, register KAFKA_HOME/PATH, install the
	# prepared server.properties, and start the broker.
	tar -zxvf kafka/kafka_2.11-2.1.1.tgz -C /opt || return 1
	cat >> ~/.bash_profile << EOF

# kafka
export KAFKA_HOME=/opt/kafka_2.11-2.1.1
export PATH=\$KAFKA_HOME/bin:\$PATH
EOF
	source ~/.bash_profile

	cp kafka/server.properties /opt/kafka_2.11-2.1.1/config
	# BUGFIX: without -daemon the broker runs in the foreground and blocks
	# the provisioning script forever; also quote the expansion.
	kafka-server-start.sh -daemon "$KAFKA_HOME/config/server.properties"
}


# 11. Install Flume
install_flume(){
	# Unpack Flume, move it into /opt under a shorter name, register
	# FLUME_HOME/PATH in ~/.bash_profile (appended content is
	# byte-identical to before), and install the prepared env script.
	tar -zxvf flume/apache-flume-1.6.0-bin.tar.gz
	mv apache-flume-1.6.0-bin /opt/flume-1.6.0
	{
		printf '\n# flume\n'
		printf 'export FLUME_HOME=/opt/flume-1.6.0\n'
		printf 'export PATH=$FLUME_HOME/bin:$PATH\n'
	} >> ~/.bash_profile
	source ~/.bash_profile

	cp flume/flume-env.sh /opt/flume-1.6.0/conf
}
