#!/bin/bash
# NOTE: bash, not plain sh — the script relies on bashisms such as
# 'source' and 'echo -e' throughout.

# Install build prerequisites and OpenJDK 8 (AdoptOpenJDK aarch64 tarball,
# expected under /bigdata), then persist JAVA_HOME in /etc/profile.
set_jdk()
{
        yum install -y tar wget rpm-build spectool
        # Guard every cd: extracting/moving from the wrong directory would
        # silently corrupt the install.
        cd /bigdata || return 1
        tar -zxvf OpenJDK8U-jdk_aarch64_linux_hotspot_8u222b10.tar.gz -C /usr/local/
        cd /usr/local || return 1
        mv jdk8u222-b10 java

        # Quoted delimiter: $PATH/$JAVA_HOME must land literally in the
        # profile, expanded at login time, not now.
        cat <<'EOF' >> /etc/profile
#java
export JAVA_HOME=/usr/local/java
export PATH=$PATH:$JAVA_HOME/bin
EOF
        source /etc/profile
        # Smoke test: fails loudly here if the JDK is unusable.
        java -version
}

# Unpack Hadoop 2.7.3 to /usr/local/hadoop and persist the usual
# HADOOP_* environment variables in /etc/profile.
set_hadoop()
{
        cd /bigdata || return 1
        tar -zxvf hadoop-2.7.3.tar.gz -C /usr/local/
        cd /usr/local || return 1
        mv hadoop-2.7.3 hadoop
        chown -R root:root ./hadoop

        # Quoted delimiter: keep $HADOOP_HOME/$PATH literal in the profile.
        cat <<'EOF' >> /etc/profile
#hadoop
export HADOOP_HOME=/usr/local/hadoop
export HADOOP_INSTALL=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin
EOF
        source /etc/profile
        # Smoke checks: binaries resolvable on the new PATH, version prints.
        whereis hdfs
        whereis start-all.sh
        hadoop version
}

# Point Hadoop at our JDK and disable SSH host-key prompts for the
# control scripts.  Appending replaces the original line-number inserts
# ("sed '25 a ...'", "sed '26 a ...'"), which silently hit the wrong
# lines if upstream hadoop-env.sh ever changes shape; since the file is
# sourced, later definitions win.
set_hadoop_env()
{
        cat <<'EOF' >> /usr/local/hadoop/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/usr/local/java
export HADOOP_SSH_OPTS="-o StrictHostKeyChecking=no"
EOF
}

# Generate core-site.xml in one shot, with fs.defaultFS filled in
# directly from $HOSTNAME.  The original left <value> out of the heredoc
# and spliced it back with "sed '21 a ...'" by line number, which
# corrupts the XML if the heredoc layout shifts by even one line.
set_core_site()
{
        # Unquoted delimiter on purpose: ${HOSTNAME} must expand now.
        cat <<EOF > /usr/local/hadoop/etc/hadoop/core-site.xml
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

        http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->

<!-- Put site-specific property overrides in this file. -->

<configuration>
<property>
        <name>fs.defaultFS</name>
        <!-- NOTE(review): 9820 is the Hadoop 3.x default RPC port; 2.7.x
             commonly uses 9000 - kept as originally configured. -->
        <value>hdfs://${HOSTNAME}:9820</value>
</property>
<property>
        <name>hadoop.tmp.dir</name>
        <value>/usr/local/hadoop/tmp</value>
</property>
<property>
        <name>hadoop.proxyuser.root.hosts</name>
        <value>*</value>
</property>
<property>
        <name>hadoop.proxyuser.root.groups</name>
        <value>*</value>
</property>
</configuration>
EOF
}

# Generate hdfs-site.xml.  Column-0 heredoc replaces the original '<<-'
# form, whose tab-only stripping breaks (heredoc never terminates) if the
# indentation is ever converted to spaces.
set_hdfs_site(){
        cat <<'EOF' > /usr/local/hadoop/etc/hadoop/hdfs-site.xml
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

        http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License. See accompanying LICENSE file.
-->

<!-- Put site-specific property overrides in this file. -->

<configuration>
<property>
        <!-- Single-node cluster: one replica (default 3 would leave every
             block permanently under-replicated). -->
        <name>dfs.replication</name>
        <value>1</value>
</property>

</configuration>
EOF
}

# Generate mapred-site.xml.  BUG FIX: the original wrote to
# mapred-site.xml.template, which Hadoop never reads, so
# mapreduce.framework.name=yarn was silently ignored and jobs would run
# with the default (local) framework.
set_mapred_site(){
        # Quoted delimiter: ${HADOOP_HOME} must stay literal in the XML and
        # be resolved by Hadoop at container-launch time.
        cat <<'EOF' > /usr/local/hadoop/etc/hadoop/mapred-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

        http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License. See accompanying LICENSE file.
-->

<!-- Put site-specific property overrides in this file. -->

<configuration>
<property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
</property>
<!-- NOTE(review): the *.env properties below are Hadoop 3.x names; on
     2.7.3 they appear inert but harmless - kept from the original. -->
<property>
        <name>yarn.app.mapreduce.am.env</name>
        <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>
<property>
        <name>mapreduce.map.env</name>
        <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>
<property>
        <name>mapreduce.reduce.env</name>
        <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>

</configuration>
EOF
}

# Append the daemon-user variables to root's .bashrc and load them.
# NOTE(review): these *_USER variables are a Hadoop 3.x mechanism for
# allowing daemons to run as root — presumably harmless no-ops on 2.7.3;
# confirm they are still wanted.
set_bashrc()
{

        printf '%s\n' \
                'export HDFS_NAMENODE_USER=root' \
                'export HDFS_DATANODE_USER=root' \
                'export HDFS_SECONDARYNAMENODE_USER=root' \
                'export YARN_RESOURCEMANAGER_USER=root' \
                'export YARN_NODEMANAGER_USER=root' >> ~/.bashrc

        source ~/.bashrc
}

# Set up passwordless SSH to localhost for the Hadoop control scripts.
# BUG FIX: on a fresh host /root/.ssh does not exist, so the original
# 'cd /root/.ssh' failed and ssh-keygen dropped the key pair into
# whatever the current directory happened to be.
login_without_pass(){
        mkdir -p /root/.ssh
        chmod 700 /root/.ssh
        cd /root/.ssh || return 1
        # -N '' : empty passphrase; -q : no banner.  (ssh-keygen is
        # synchronous, so the original 'sleep 3' served no purpose.)
        ssh-keygen -t rsa -N '' -f id_rsa -q
        touch authorized_keys
        chmod 600 authorized_keys
        cat id_rsa.pub >> authorized_keys
}

# Download and unpack the MySQL 5.7 aarch64 tarball, create the service
# user, wire up init scripts, and persist MYSQL_HOME in /etc/profile.
set_mysql(){
        cd /bigdata || return 1
        wget https://obs.cn-north-4.myhuaweicloud.com/obs-mirror-ftp4/database/mysql-5.7.27-aarch64.tar.gz
        tar -xvf mysql-5.7.27-aarch64.tar.gz -C /usr/local/
        cd /usr/local || return 1
        mv mysql-5.7.27-aarch64 mysql
        groupadd -r mysql && useradd -r -g mysql -s /sbin/nologin -M mysql
        yum install -y libaio*
        mkdir -p /usr/local/mysql/logs
        chown -R mysql:mysql /usr/local/mysql
        ln -sf /usr/local/mysql/my.cnf /etc/my.cnf
        # The bundled libstdc++ is newer than the system one; swap it in
        # (old library kept as *.old for rollback).
        cp -rf /usr/local/mysql/extra/lib* /usr/lib64/
        mv /usr/lib64/libstdc++.so.6 /usr/lib64/libstdc++.so.6.old
        ln -s /usr/lib64/libstdc++.so.6.0.24 /usr/lib64/libstdc++.so.6
        # SysV init script + systemd sysv shim for 'systemctl start mysqld'.
        cp -rf /usr/local/mysql/support-files/mysql.server /etc/init.d/mysqld
        chmod +x /etc/init.d/mysqld
        /usr/lib/systemd/systemd-sysv-install enable mysqld

        # Quoted delimiter: keep $PATH/$MYSQL_HOME literal in the profile.
        cat <<'EOF' >> /etc/profile
export MYSQL_HOME=/usr/local/mysql
export PATH=$PATH:$MYSQL_HOME/bin
EOF
        source /etc/profile
}

# Initialize the data directory, start mysqld, set the root password and
# seed the streamsets demo schema.
start_mysql(){
        cd /usr/local/mysql/bin || return 1
        # --initialize-insecure: start with no root password; it is set to
        # 'root' in the SQL batch below.
        mysqld --initialize-insecure --user=mysql --basedir=/usr/local/mysql --datadir=/usr/local/mysql/data
        systemctl start mysqld
        # FIX: the original ran the same "update mysql.user ..." twice;
        # once (followed by FLUSH PRIVILEGES) is sufficient.
        # NOTE(review): hard-coded 'root' password plus GRANT ... TO
        # 'root'@'%' opens the server to the network - lab use only.
        mysql -u root -e "use mysql;
        update mysql.user set authentication_string=password('root') where User='root';
        create database streamsets;
        GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' IDENTIFIED BY 'root' WITH GRANT OPTION;
        grant all privileges on streamsets.* to root;
        FLUSH PRIVILEGES;
        use streamsets;
        create table test(id int primary key auto_increment,name varchar(50) not null,age int not null);
        create table test_target(id int primary key auto_increment,name varchar(50) not null,age int not null);
        insert into test(id,name,age) values(1,'zhou',18);
        insert into test(id,name,age) values(2,'abc',20);
        insert into test(id,name,age) values(3,'sss',11);
        insert into test(id,name,age) values(4,'aaa',12);";
}

# Format the NameNode and start all Hadoop daemons.
start_hadoop(){
        # Refresh PATH etc. so hdfs / start-all.sh resolve.
        source /etc/profile
        cd /usr/local/hadoop/bin || return 1
        # One-time operation: re-running format on an existing cluster
        # prompts and can destroy HDFS data.  NOTE(review): guard with a
        # marker file if this script may be re-run.
        ./hdfs namenode -format
        start-all.sh
        # Quick visual check that the daemons came up.
        jps
}

# Unpack ZooKeeper 3.5.9 and write a minimal standalone zoo.cfg.
set_zookeeper(){
        cd /bigdata || return 1
        tar -zxvf apache-zookeeper-3.5.9-bin.tar.gz -C /usr/local/
        cd /usr/local || return 1
        mv apache-zookeeper-3.5.9-bin zookeeper
        mkdir -p /usr/local/zookeeper/data /usr/local/zookeeper/logs
        # Write (not append) the config.  The original echoed an indented
        # multi-line string with '>>', leaving leading spaces on every
        # config line and duplicating entries on re-run.
        cat <<'EOF' > /usr/local/zookeeper/conf/zoo.cfg
tickTime=2000
initLimit=10
syncLimit=5
dataDir=/usr/local/zookeeper/data
clientPort=2181
EOF
}

# Start the standalone ZooKeeper server.
start_zookeeper(){
        # Guard the cd: './zkServer.sh' would otherwise be executed from
        # whatever directory we happened to be in.
        cd /usr/local/zookeeper/bin || return 1
        ./zkServer.sh start
}

# Unpack Kafka 2.2.2 (Scala 2.11) to /usr/local/kafka.
set_kafka(){
        cd /bigdata || return 1
        tar -zxvf kafka_2.11-2.2.2.tgz -C /usr/local/
        cd /usr/local || return 1
        mv kafka_2.11-2.2.2 kafka
}

# Start the Kafka broker in the background (requires ZooKeeper, started
# earlier, to be up; output goes to nohup.out in the bin directory).
start_kafka(){
        cd /usr/local/kafka/bin || return 1
        nohup ./kafka-server-start.sh ../config/server.properties &
}

# Install Hive 2.3.5, configure a MySQL-backed metastore, install the
# JDBC driver, initialize the schema and persist HIVE_HOME.
# Requires MySQL (set_mysql/start_mysql) to be running.
set_hive(){
        cd /bigdata || return 1
        tar -zxvf apache-hive-2.3.5-bin.tar.gz -C /usr/local/
        cd /usr/local || return 1
        mv apache-hive-2.3.5-bin hive
        cd /usr/local/hive/conf || return 1
        cp "hive-default.xml.template" "hive-default.xml"

        # Quoted delimiter: the $HIVE_SERVER2_* references in the
        # descriptions must land literally in the XML.
        cat <<'EOF' > hive-site.xml
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
        <name>hive.metastore.schema.verification</name>
        <value>false</value>
</property>
<property>
        <name>hive.server2.thrift.port</name>
        <value>10000</value>
<description>Port number of HiveServer2 Thrift interface.
 Can be overridden by setting $HIVE_SERVER2_THRIFT_PORT</description>
</property>
<property>
        <name>hive.server2.thrift.bind.host</name>
        <value>0.0.0.0</value>
<description>Bind host on which to run the HiveServer2 Thrift interface.
Can be overridden by setting $HIVE_SERVER2_THRIFT_BIND_HOST</description>
</property>
<!-- mysql username -->
<property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>root</value>
</property>
<!-- mysql password -->
<property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>root</value>
</property>
<!-- mysql connection URL if hive and mysql on the same server,using localhost -->
<property>
        <name>javax.jdo.option.ConnectionURL</name>
        <value>jdbc:mysql://localhost:3306/hive?createDatabaseIfNotExist=true</value>
</property>
<!-- mysql Connector Driver -->
<property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.jdbc.Driver</value>
</property>
</configuration>
EOF

        # JDBC driver for the metastore connection.
        cd /bigdata || return 1
        # wget http://124.70.1.233/mysql-connector-java-5.1.40.tar.gz
        tar -zxvf mysql-connector-java-5.1.40.tar.gz -C /usr/local/
        cp /usr/local/mysql-connector-java-5.1.40/mysql-connector-java-5.1.40-bin.jar /usr/local/hive/lib
        cd /usr/local/hive/bin || return 1
        ./schematool -dbType mysql -initSchema

        # Configure hive environment variables (quoted delimiter: keep
        # $PATH/$HIVE_HOME literal in the profile).
        cat <<'EOF' >> /etc/profile
#hive
export HIVE_HOME=/usr/local/hive
export PATH=$PATH:$HIVE_HOME/bin
EOF
        cp /usr/local/hive/conf/hive-site.xml /usr/local/hadoop/etc/hadoop/
        source /etc/profile
        # Smoke test: metastore reachable and DDL works.
        hive -e "create database test;quit;"
}

# Launch HiveServer2 detached (Thrift on port 10000 per hive-site.xml);
# relies on $HIVE_HOME/bin being on PATH via /etc/profile.  Output goes
# to ./nohup.out in the current directory.
start_hive(){
        nohup hiveserver2 &
}

# Build StreamSets Data Collector 3.3.0 as an RPM from the official
# tarball (expected under /bigdata) and install it.
# BUG FIXES vs original:
#  - the original ran 'rpmbuild -ba streamsets.spec' in $HOME *before*
#    the spec file was written, a guaranteed failure;
#  - the rpmbuild tree was never created before writing into it;
#  - soft nofile limit was '3276' (dropped digit), inconsistent with the
#    hard limit and with 'ulimit -HSn 32768'.
set_streamsets(){
        # Quoted delimiter: profile lines are literal.
        cat <<'EOF' >> /etc/profile
# streamsets
ulimit -HSn 32768
# ulimit -n 65535
export STREAMSETS_LIBRARIES_EXTRA_DIR=/usr/local/streamsets/sdc-extras/
EOF

        # Create the rpmbuild tree, then write the spec.  Quoted delimiter
        # so $RPM_BUILD_DIR and the rpm %{...} macros stay literal.
        mkdir -p /root/rpmbuild/SPECS /root/rpmbuild/SOURCES
        cat <<'EOF' > /root/rpmbuild/SPECS/streamsets.spec
%global __python /usr/bin/python3
%global __os_install_post %{nil}
%define debug_package %{nil}
Name: streamsets
Summary: streamsets package
Version: 3.3.0
Release: 1%{?dist}
Source0: https://archives.streamsets.com/datacollector/3.3.0/tarball/streamsets-datacollector-core-3.3.0.tgz
URL: https://archives.streamsets.com/datacollector/3.3.0/tarball/streamsets-datacollector-core-3.3.0.tgz
License: GPLv2
%description
%prep
%setup -n%{name}-datacollector-%{version}
%build

%install
mkdir -p %{buildroot}/usr/local/streamsets
cp -r $RPM_BUILD_DIR/%{name}-datacollector-%{version}/.   %{buildroot}/usr/local/streamsets
cd %{buildroot}/usr/local/streamsets
mkdir sdc-extras
sed -i '57c grant codebase "file:///usr/local/streamsets/sdc-extras/-" {' etc/sdc-security.policy

%files
%attr(600,root,root) /usr/local/streamsets/etc/*
%doc
/usr/local/streamsets/.
%changelog
EOF
        # SDC refuses to start with SELinux enforcing / low fd limits.
        sed -i '7c SELINUX=disabled' /etc/selinux/config
        echo 'sdc soft nofile 32768' >> /etc/security/limits.conf
        echo 'sdc hard nofile 32768' >> /etc/security/limits.conf
        cp /bigdata/streamsets-datacollector-core-3.3.0.tgz /root/rpmbuild/SOURCES/
        cd /root/rpmbuild/SPECS/ || return 1
        rpmbuild -bb streamsets.spec
        rpm -ivh /root/rpmbuild/RPMS/aarch64/streamsets-3.3.0-1.aarch64.rpm
        # turn off firewall
        systemctl stop firewalld.service
}

# Start StreamSets Data Collector in the background.
start_streamsets(){
        # Pick up STREAMSETS_LIBRARIES_EXTRA_DIR and the ulimit setting.
        source /etc/profile
        cd /usr/local/streamsets/bin || return 1
        ./streamsets dc &
}

main()
{
        # Provision a single-node big-data stack (JDK, Hadoop, MySQL,
        # ZooKeeper, Kafka, Hive, StreamSets) on an aarch64 host.
        # Expects the install tarballs under /bigdata; must run as root.
        # Ordering matters: JDK before Hadoop/Hive; all Hadoop config
        # before the NameNode format in start_hadoop; MySQL before Hive
        # (metastore backend); ZooKeeper before Kafka.
        set_jdk
        set_hadoop
        set_hadoop_env
        set_core_site
        set_hdfs_site
        set_mapred_site
        set_bashrc
        login_without_pass
        set_mysql
        start_mysql
        start_hadoop
        set_zookeeper
        start_zookeeper
        set_kafka
        start_kafka
        set_hive
        start_hive
        set_streamsets
        start_streamsets
}

# Forward any script arguments to main (idiomatic entry point).
main "$@"