package com.hexinfo.dmpro.sparing.service.impl;

import cn.hutool.core.date.DateUnit;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.io.file.FileReader;
import cn.hutool.core.text.StrSpliter;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.db.Db;
import cn.hutool.db.DbUtil;
import cn.hutool.db.Entity;
import cn.hutool.db.ds.DSFactory;
import cn.hutool.db.handler.*;
import cn.hutool.db.sql.SqlExecutor;
import com.alibaba.fastjson.JSON;
import com.hexinfo.dmpro.common.utils.CommonConstants;
import com.hexinfo.dmpro.common.utils.ConnExecuteUtil;
import com.hexinfo.dmpro.common.utils.ExportBeanExcel;
import com.hexinfo.dmpro.common.utils.ScanCommonConstants;
import com.hexinfo.dmpro.sparing.dto.CreateTableDTO;
import com.hexinfo.dmpro.sparing.dto.LatestTableDTO;
import com.hexinfo.dmpro.sparing.dto.LsHdfsDTO;
import com.hexinfo.dmpro.sparing.model.HiveTableInfo;
import com.hexinfo.dmpro.sparing.model.ScanMetadata;
import com.hexinfo.dmpro.sparing.service.HiveToOracleService;
import lombok.extern.flogger.Flogger;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.sql.DataSource;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;

/**
 * hive元数据信息获取
 */
@Service
@Slf4j
public class HiveToOracleImpl  implements HiveToOracleService {

    // Datasource group name (hutool DSFactory key) for the Hive metastore Oracle DB.
    @Value("${hive.metadata.version}")
    private String version ;
    // Fixed datasource keys for the two clusters queried by hiveQueryLibraryTable:
    // "prd" (JQ cluster) and "wgprd" (all others).
    private String versionJQ = "hiveOracle_prd";
    private String versionWG = "hiveOracle_wgprd";
    // Base directory for exported txt/csv report files.
    @Value("${file.path}")
    private String excelFilePath ;
    // Base directory for the per-table HDFS listing files written by lsHdfs.
    @Value("${file.hdfs}")
    private String filePath;

    /**
     * Counts the total number of Hive tables in the metastore.
     *
     * @return the table count, or {@code null} when the query fails or
     *         returns no COUNT column
     */
    @Override
    public Integer selectHiveTableCount() {
        DataSource ds = DSFactory.get(version);
        Db.use(ds);
        try (Connection conn = ds.getConnection()) {
            Entity count = SqlExecutor.query(conn, CommonConstants.HIVE_SQL_ORACLE_ROWNUM_COUNT, new EntityHandler());
            // Guard against an empty result; cast via Number instead of BigDecimal
            // so any numeric mapping the driver chooses (BigDecimal, Long, ...) works.
            Object raw = count == null ? null : count.get("COUNT");
            return raw == null ? null : ((Number) raw).intValue();
        } catch (Exception e) {
            log.warn("selectHiveTableCount error", e);
        }
        return null;
    }

    /**
     * Fetches a page of Hive table metadata, bounded by Oracle ROWNUM.
     *
     * @param sta page window start (inclusive, ROWNUM-based)
     * @param end page window end (inclusive, ROWNUM-based)
     * @return the metadata rows, or {@code null} when the query fails
     */
    @Override
    public List<ScanMetadata> selectHiveTableInfo(int sta, int end) {
        final DataSource dataSource = DSFactory.get(version);
        Db.use(dataSource);
        try (Connection connection = dataSource.getConnection()) {
            return (List<ScanMetadata>) SqlExecutor.query(
                    connection,
                    CommonConstants.HIVE_SQL_ORACLE_ROWNUM,
                    new BeanListHandler(ScanMetadata.class),
                    sta, end);
        } catch (Exception e) {
            log.warn("selectHiveTableInfo error", e);
            return null;
        }
    }

    /**
     * Lists metadata for all tables of one database.
     *
     * @param tblDatabaseName database name bound as a query parameter
     * @param tblNameWhere    extra SQL fragment spliced into the ${sql} placeholder
     * @return matched rows, or {@code null} when the query fails
     */
    @Override
    public List<ScanMetadata> selectHiveTableList(String tblDatabaseName, String tblNameWhere) {
        final DataSource dataSource = DSFactory.get(version);
        Db.use(dataSource);
        try (Connection connection = dataSource.getConnection()) {
            // NOTE(review): tblNameWhere is concatenated into the SQL text, not bound —
            // callers must only pass trusted fragments.
            final String sql = StrUtil.replace(CommonConstants.HIVE_SQL_ORACLE_TABLE_LIST, "${sql}", tblNameWhere);
            return (List<ScanMetadata>) SqlExecutor.query(
                    connection, sql, new BeanListHandler(ScanMetadata.class), tblDatabaseName);
        } catch (Exception e) {
            log.warn("selectHiveTableList error", e);
            return null;
        }
    }

    /**
     * Returns the names of every table under one database.
     *
     * @param dbName database name bound as a query parameter
     * @return table names, or {@code null} when the query fails
     */
    @Override
    public List<String> selectHiveComplete(String dbName) {
        final DataSource dataSource = DSFactory.get(version);
        Db.use(dataSource);
        try (Connection connection = dataSource.getConnection()) {
            return (List<String>) SqlExecutor.query(
                    connection,
                    CommonConstants.HIVE_SQL_ORACLE_COMPLETE,
                    new BeanListHandler(String.class),
                    dbName);
        } catch (Exception e) {
            log.warn("selectHiveComplete error", e);
            return null;
        }
    }

    /**
     * Queries partition values for one table that are older than the given
     * number of days (${day} placeholder in the SQL).
     *
     * @param tableName     table name bound as a query parameter
     * @param partNameWhere extra SQL fragment spliced into the ${sql} placeholder
     * @param pkeyPartName  partition key name bound as a query parameter
     * @param year          day threshold substituted into the ${day} placeholder
     * @param format        partition value format bound as a query parameter
     * @param dbName        database name bound as a query parameter
     * @return matched partition values, or {@code null} when the query fails
     */
    @Override
    public List<String> selectHiveTableHDFS(String tableName, String partNameWhere, String pkeyPartName, int year, String format, String dbName) {
        final DataSource dataSource = DSFactory.get(version);
        Db.use(dataSource);
        try (Connection connection = dataSource.getConnection()) {
            String sql = StrUtil.replace(CommonConstants.HIVE_SQL_ORACLE_TWO_YEAR, "${sql}", partNameWhere);
            sql = StrUtil.replace(sql, "${day}", String.valueOf(year));
            return (List<String>) SqlExecutor.query(
                    connection, sql, new BeanListHandler(String.class),
                    dbName, tableName, pkeyPartName, format);
        } catch (Exception e) {
            log.warn("selectHiveTableHDFS error", e);
            return null;
        }
    }

    /**
     * Queries partition values for one table inside a time range
     * (BETWEEN clause on PART_KEY_VAL).
     *
     * @param tableName     table name bound as a query parameter
     * @param partNameWhere extra SQL fragment spliced into the ${sql} placeholder
     * @param pkeyPartName  partition key name bound as a query parameter
     * @param staTime       range start, expected as yyyyMMdd text
     * @param endTime       range end, expected as yyyyMMdd text
     * @param format        partition format whose length selects the precision
     *                      (8 = yyyyMMdd, 6 = yyyyMM, 4 = yyyy)
     * @param dbName        database name bound as a query parameter
     * @return matched partition values, or {@code null} when the query fails
     */
    @Override
    public List<String> selectHiveTableHDFSBee(String tableName, String partNameWhere, String pkeyPartName, String staTime, String endTime, String format, String dbName) {
        DataSource ds = DSFactory.get(version);
        Db.use(ds);
        try (Connection conn = ds.getConnection()) {
            String sql = StrUtil.replace(CommonConstants.HIVE_SQL_ORACLE_BEE, "${sql}", partNameWhere);
            String bee = buildPartKeyRange(staTime, endTime, format);
            List<String> stringList = (List<String>) SqlExecutor.query(
                    conn, StrUtil.replace(sql, "${day}", bee),
                    new BeanListHandler(String.class), dbName, tableName, pkeyPartName);
            return stringList;
        } catch (Exception e) {
            log.warn("selectHiveTableHDFSBee error", e);
        }
        return null;
    }

    /**
     * Builds the BETWEEN filter on PART_KEY_VAL, truncating the boundary values
     * to the partition precision implied by the format length.
     * The original switch duplicated the length-8 and default arms; both use the
     * full values, so only lengths 6 and 4 need truncation. Bounds are checked so
     * a short input falls back to the untruncated value instead of throwing.
     * NOTE(review): staTime/endTime are interpolated into the SQL text, not bound
     * as parameters — callers must only pass trusted values.
     */
    private static String buildPartKeyRange(String staTime, String endTime, String format) {
        String day = "and pkv.PART_KEY_VAL BETWEEN '{}' AND '{}'";
        int len = format.length();
        if ((len == 6 || len == 4) && staTime.length() >= len && endTime.length() >= len) {
            return StrUtil.format(day, staTime.substring(0, len), endTime.substring(0, len));
        }
        return StrUtil.format(day, staTime, endTime);
    }

    /**
     * Looks up the partition key name(s) of one table.
     *
     * @param tableName table name bound as a query parameter
     * @param dbName    database name bound as a query parameter
     * @return partition key names, or {@code null} when the query fails
     */
    @Override
    public List<String> selectHivePkeyName(String tableName, String dbName) {
        final DataSource dataSource = DSFactory.get(version);
        Db.use(dataSource);
        try (Connection connection = dataSource.getConnection()) {
            return (List<String>) SqlExecutor.query(
                    connection,
                    CommonConstants.HIVE_SQL_ORACLE_PKEY_NAME,
                    new BeanListHandler(String.class),
                    tableName, dbName);
        } catch (Exception e) {
            log.warn("selectHivePkeyName error", e);
            return null;
        }
    }

    /**
     * Looks up HDFS storage locations for one table.
     *
     * @param where     extra SQL fragment spliced into the ${sql} placeholder
     * @param tableName table name bound as a query parameter
     * @param dbName    database name bound as a query parameter
     * @return HDFS locations, or {@code null} when the query fails
     */
    @Override
    public List<String> selectHiveLocation(String where, String tableName, String dbName) {
        final DataSource dataSource = DSFactory.get(version);
        Db.use(dataSource);
        try (Connection connection = dataSource.getConnection()) {
            final String sql = StrUtil.replace(CommonConstants.HIVE_SQL_ORACLE_HDFS_LOCATION, "${sql}", where);
            return (List<String>) SqlExecutor.query(
                    connection, sql, new BeanListHandler(String.class), tableName, dbName);
        } catch (Exception e) {
            log.warn("selectHiveLocation error", e);
            return null;
        }
    }

    /**
     * Looks up UAT HDFS storage locations for one table.
     *
     * @param dbName    database name bound as a query parameter
     * @param tableName table name bound as a query parameter
     * @return HDFS locations, or {@code null} when the query fails
     */
    @Override
    public List<String> selectHiveLocationUat(String dbName, String tableName) {
        final DataSource dataSource = DSFactory.get(version);
        Db.use(dataSource);
        try (Connection connection = dataSource.getConnection()) {
            return (List<String>) SqlExecutor.query(
                    connection,
                    CommonConstants.HIVE_SQL_ORACLE_UAT_HDFS_LOCATION,
                    new BeanListHandler(String.class),
                    dbName, tableName);
        } catch (Exception e) {
            log.warn("selectHiveLocationUat error", e);
            return null;
        }
    }

    /**
     * Fetches tables of one database created within the given timestamp range.
     *
     * @param dbName  database name bound as a query parameter
     * @param staTime range start (epoch timestamp)
     * @param endTime range end (epoch timestamp)
     * @return matched tables, or {@code null} when the query fails
     */
    @Override
    public List<CreateTableDTO> selectHiveLatestTable(String dbName, long staTime, long endTime) {
        final DataSource dataSource = DSFactory.get(version);
        Db.use(dataSource);
        try (Connection connection = dataSource.getConnection()) {
            return (List<CreateTableDTO>) SqlExecutor.query(
                    connection,
                    CommonConstants.HIVE_SQL_ORACLE_LATEST_TABLE,
                    new BeanListHandler(CreateTableDTO.class),
                    staTime, endTime, dbName);
        } catch (Exception e) {
            log.warn("selectHiveLatestTable error", e);
            return null;
        }
    }

    /**
     * Counts metadata rows changed between the two timestamps
     * (both bounds are bound twice because the SQL unions two conditions).
     *
     * @param staTime previous run timestamp
     * @param endTime current run timestamp
     * @return the row count, or {@code null} when the query fails or
     *         returns no COUNT column
     */
    @Override
    public Integer hiveSqlOracleIncrementCount(Long staTime, Long endTime) {
        DataSource ds = DSFactory.get(version);
        Db.use(ds);
        try (Connection conn = ds.getConnection()) {
            Entity count = SqlExecutor.query(conn, CommonConstants.HIVE_SQL_ORACLE_INCREMENT_COUNT, new EntityHandler(), staTime, endTime, staTime, endTime);
            // Guard against an empty result; cast via Number instead of BigDecimal
            // so any numeric mapping the driver chooses works.
            Object raw = count == null ? null : count.get("COUNT");
            return raw == null ? null : ((Number) raw).intValue();
        } catch (Exception e) {
            log.warn("hiveSqlOracleIncrementCount error", e);
        }
        return null;
    }

    /**
     * Fetches a page of metadata rows changed between the two timestamps.
     *
     * @param staTime previous run timestamp
     * @param endTime current run timestamp
     * @param sta     page window start
     * @param end     page window end
     * @return the changed rows, or {@code null} when the query fails
     */
    @Override
    public List<ScanMetadata> hiveSqlOracleIncrement(Long staTime, Long endTime, int sta, int end) {
        final DataSource dataSource = DSFactory.get(version);
        Db.use(dataSource);
        try (Connection connection = dataSource.getConnection()) {
            // The time bounds are bound twice because the SQL unions two conditions.
            return (List<ScanMetadata>) SqlExecutor.query(
                    connection,
                    CommonConstants.HIVE_SQL_ORACLE_INCREMENT,
                    new BeanListHandler(ScanMetadata.class),
                    staTime, endTime, staTime, endTime, sta, end);
        } catch (Exception e) {
            log.warn("hiveSqlOracleIncrement error", e);
            return null;
        }
    }



    /**
     * Writes scan-metadata rows to a UTF-8 txt report file, one line per table.
     *
     * @param list  rows to write
     * @param title file name (without extension) under the configured export dir
     */
    @Override
    public void txtFileSD(List<ScanMetadata> list, String title) {
        String path = excelFilePath + title + ".txt";
        // StringBuilder avoids O(n^2) string concatenation for large result sets.
        StringBuilder content = new StringBuilder();
        for (ScanMetadata l : list) {
            // NOTE(review): "\n\r" looks like a typo for "\r\n" — kept byte-identical
            // to preserve the existing file format; confirm before changing.
            content.append("库名为:").append(l.getTblDatabaseName())
                    .append("表名为:").append(l.getTblName())
                    .append("\n\r");
        }
        FileUtil.writeUtf8String(content.toString(), path);
    }

    /**
     * Writes scan-metadata rows to a csv report file with database/table columns.
     *
     * @param list  rows to write
     * @param title file name (without extension) under the configured export dir;
     *              also used as the sheet title
     */
    @Override
    public void excelFileSD(List<ScanMetadata> list, String title) {
        String path = excelFilePath + title + ".csv";
        // Create the file if it does not exist yet.
        if (!FileUtil.isFile(path)) {
            FileUtil.touch(path);
        }
        File dcFile = new File(path);
        try (FileOutputStream fileOutputStream = new FileOutputStream(dcFile)) {
            ExportBeanExcel<ScanMetadata> excel = new ExportBeanExcel<ScanMetadata>();
            // Column headers and the matching ScanMetadata property names (by index).
            List<String> headerName = new ArrayList<>();
            headerName.add("库名");
            headerName.add("表名");
            List<String> headersId = new ArrayList<>();
            headersId.add("tblDatabaseName");
            headersId.add("tblName");
            excel.exportExcel(title, headerName, headersId, list, fileOutputStream);
        } catch (IOException e) {
            // Log with context instead of printStackTrace so failures reach the app log.
            log.warn("excelFileSD error, path={}", path, e);
        }
    }
    /**
     * Runs a shell script that lists HDFS files under a warehouse path, then
     * splits the listing into one txt file per table (first path segment after
     * the HDFS prefix).
     * Example command: {@code sh /opt/script/ls_hdfs_files.sh hdfs://host:8020/user/hive/warehouse/ods.db/ <outFile>}
     *
     * @param dto script path, HDFS prefix, listing output path and database name
     * @return elapsed wall-clock seconds of the whole run
     */
    @Override
    public int lsHdfs(LsHdfsDTO dto) {
        Date sta = DateUtil.date();
        int exe = ConnExecuteUtil.execShellCode(dto.getSh() + " " + dto.getHdfs() + " " + dto.getHdfsPath());
        if (exe != 0) {
            // Best-effort: keep going, the listing file may still be (partially) usable.
            log.warn("ls_hdfs script returned non-zero exit code: {}", exe);
        }
        FileReader fileReader = new FileReader(dto.getHdfsPath());
        List<String> hang = fileReader.readLines();
        hang.forEach(h -> {
            // Split each listed path on the HDFS prefix; the first remaining
            // segment is the table directory name.
            String[] base = StrUtil.split(h, dto.getHdfs());
            if (ObjectUtil.isNotEmpty(base) && base.length > 1) {
                List<String> table = StrSpliter.split(base[1], StrUtil.SLASH, true, true);
                if (ObjectUtil.isNotEmpty(table)) {
                    String tableName = table.get(0);
                    String path = filePath + dto.getBaseName() + "/" + tableName + ".txt";
                    log.debug("appending hdfs entry to {}", path);
                    // Bug fix: the original touched the file only when it ALREADY
                    // existed; create it when it is missing instead.
                    if (!FileUtil.exist(path)) {
                        FileUtil.touch(path);
                    }
                    FileUtil.appendString(h + "\n", path, StandardCharsets.UTF_8);
                }
            }
        });
        Date end = DateUtil.date();
        int betS = (int) DateUtil.between(sta, end, DateUnit.SECOND);
        log.info("lsHdfs finished in {}s", betS);
        return betS;
    }

    /**
     * Lists database/table pairs from the metastore of the cluster selected
     * by the flag (JQ cluster uses the prd datasource, anything else wgprd).
     *
     * @param flag cluster selector compared against ScanCommonConstants.ClusterName.JQ
     * @return database/table rows, or {@code null} when the query fails
     */
    @Override
    public List<CreateTableDTO> hiveQueryLibraryTable(String flag) {
        final DataSource dataSource = ScanCommonConstants.ClusterName.JQ.value.equals(flag)
                ? DSFactory.get(versionJQ)
                : DSFactory.get(versionWG);
        Db.use(dataSource);
        try (Connection connection = dataSource.getConnection()) {
            return (List<CreateTableDTO>) SqlExecutor.query(
                    connection,
                    CommonConstants.HIVE_SQL_ORACLE_TABLE_LIST_WHERE,
                    new BeanListHandler(CreateTableDTO.class));
        } catch (Exception e) {
            log.warn("hiveQueryLibraryTable error", e);
            return null;
        }
    }


}
