package com.ideal.tools.db;

import com.ideal.service.hdfs.HDFSService;
import com.ideal.tools.ssh.common.PropertyBox;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * One-shot database migration tool: rebuilds the new-style schema, loads/refreshes
 * data into the new tables, reports row counts of each new table next to the old
 * table it replaces, then drops the old tables.
 *
 * <p>Example usage: {@code new ChangeDataBase().changeDataBase("hadoopadmin_dw_sd");}
 *
 * <p>Not thread-safe; intended to be run once from a single thread.
 *
 * Created by Administrator on 2016/7/30.
 */
public class ChangeDataBase {

    private static final Logger logger = LoggerFactory.getLogger(ChangeDataBase.class);

    // Mapping: new table name -> old table name it replaces (filled by initTableRelation()).
    private final HashMap<String, String> tableRelation = new HashMap<String, String>();

    // Tables of the NEW schema whose row counts are reported after migration.
    private static final String NEW_TABLE_LIST =
            "'cluster_cluster_type','cluster_machine','cluster_machine_type'" +
            ",'cluster_parameter','cluster_user','cluster_user_clientquota'" +
            ",'cluster_user_hdfsquota','cluster_user_kbrauth','cluster_user_kbrconfig'" +
            ",'cluster_user_queue','cluster_user_type','meta_hdfs_access'" +
            ",'meta_hdfs_info','meta_hdfs_info_bak','meta_hive_access'" +
            ",'meta_hive_info','meta_hive_sql','system_company'" +
            ",'system_menu','system_role','system_role_menu'" +
            ",'system_user','system_user_role'" +
            ",'system_web_log','system_web_operations'";

    /**
     * Runs the full migration for the given schema:
     * <ol>
     *   <li>Initialize cluster_parameter table structure and data</li>
     *   <li>Initialize the remaining table structures and partial data (excluding hdfsBakInfo-related data)</li>
     *   <li>Refresh HDFS data into the new tables</li>
     *   <li>Initialize hdfsBakInfo-related data</li>
     *   <li>Initialize system-related data (new data inserted directly into the new tables)</li>
     *   <li>Drop the old tables</li>
     * </ol>
     *
     * @param dbName name of the MySQL schema being migrated (used to query
     *               {@code information_schema.tables} for row counts)
     */
    public void changeDataBase(String dbName) {
        // Build the new-table -> old-table mapping used for the count report below.
        initTableRelation();
        // Step 1: cluster_parameter table structure and data.
        MysqlDBUtils.exeSQLFileInWebDB("classpath:/1.cluster_parameter_table_data.sql");
        // Step 2: remaining table structures and partial data (excluding hdfsBakInfo-related data).
        MysqlDBUtils.exeSQLFileInWebDB("classpath:/2.rest_table_data.sql");
        // Reload database parameters so subsequent steps see the freshly created tables.
        PropertyBox.reflush();
        // Step 3: refresh HDFS data into the new tables.
        HDFSService hdfsService = new HDFSService();
        hdfsService.refreshHDFS(null);
        // Step 4: hdfsBakInfo-related data.
        MysqlDBUtils.exeSQLFileInWebDB("classpath:/3.hdfs_bak_info_data.sql");
        // Step 5: system-related data (new data inserted directly into the new tables).
        MysqlDBUtils.exeSQLFileInWebDB("classpath:/4.system_data.sql");

        // Report row counts: each new table next to the old table it replaces.
        Map<String, String> oldTable = selectOldTableCount(dbName);
        List<Map<String, Object>> newTable = selectNewTableCount(dbName);
        for (Map<String, Object> curMap : newTable) {
            String newTableName = curMap.get("table_name").toString();
            String newTableCount = curMap.get("table_rows").toString();
            String oldTableName = tableRelation.get(newTableName);
            if (oldTableName != null && !oldTableName.isEmpty()) {
                logger.info("新 {} 表有 {} 条数据！对应旧表 {} {} 条！",
                        newTableName, newTableCount, oldTableName, oldTable.get(oldTableName));
            } else {
                logger.info("新 {} 表有 {} 条数据！", newTableName, newTableCount);
            }
        }
        // Step 6: drop the old tables.
        MysqlDBUtils.exeSQLFileInWebDB("classpath:/5.drop_table.sql");
    }

    /**
     * Populates the new-table -> old-table mapping.
     *
     * <p>Idempotent: repeated calls re-put the same fixed entries.
     *
     * @return the (mutable, internal) mapping of new table name to old table name
     */
    public HashMap<String, String> initTableRelation() {
        tableRelation.put("cluster_cluster_type", "d_cluster");
        tableRelation.put("cluster_machine_type", "d_machin_type");
        tableRelation.put("cluster_parameter", "t_parameter_new");
        tableRelation.put("cluster_machine", "machine_info");
        tableRelation.put("system_company", "t_customer");
        tableRelation.put("cluster_user", "hadoopuser");
        tableRelation.put("cluster_user_hdfsquota", "hadoopuser_hdfs");
        tableRelation.put("cluster_user_kbrconfig", "kdc_config");
        tableRelation.put("cluster_user_queue", "fair_scheduler_allocation");
        tableRelation.put("meta_hdfs_info", "hdfs_info");
        tableRelation.put("meta_hdfs_access", "hdfs_visiters");
        tableRelation.put("meta_hive_info", "hive_info");
        tableRelation.put("meta_hive_access", "hive_visiters");
        return tableRelation;
    }

    /**
     * Queries the row count of every table in the given schema (old tables included).
     *
     * @param dbName schema to inspect
     * @return map of table name -> row count as reported by {@code information_schema.tables}
     *         (approximate for InnoDB); empty map when the query returns nothing
     */
    public Map<String, String> selectOldTableCount(String dbName) {
        // NOTE(security): dbName is concatenated into the SQL text because MysqlDBUtils
        // exposes no parameterized query; callers must pass a trusted schema name only.
        String sql = "select table_name,table_rows from information_schema.tables where TABLE_SCHEMA = '" + dbName + "'";
        Map<String, String> oldTable = new HashMap<String, String>();
        List<Map<String, Object>> list = MysqlDBUtils.queryWebDB(sql);
        if (list != null) {
            for (Map<String, Object> row : list) {
                oldTable.put(row.get("table_name").toString(), row.get("table_rows").toString());
            }
        }
        return oldTable;
    }

    /**
     * Queries the row counts of the new-schema tables ({@link #NEW_TABLE_LIST}) in the
     * given schema.
     *
     * @param dbName schema to inspect
     * @return one map per table with keys {@code table_name} and {@code table_rows},
     *         as returned by {@code MysqlDBUtils.queryWebDB}
     */
    public List<Map<String, Object>> selectNewTableCount(String dbName) {
        // NOTE(security): dbName is concatenated into the SQL text because MysqlDBUtils
        // exposes no parameterized query; callers must pass a trusted schema name only.
        String sql = "select table_name,table_rows from information_schema.tables where table_name in ("
                + NEW_TABLE_LIST + ") and TABLE_SCHEMA = '" + dbName + "'";
        return MysqlDBUtils.queryWebDB(sql);
    }
}
