package com.powerdata.system.paimon.impl;

import cn.hutool.core.date.DateUtil;
import com.powerdata.common.utils.SecurityUtils;
import com.powerdata.common.utils.StringUtils;
import com.powerdata.common.utils.bean.BeanUtils;

import com.powerdata.core.paimon.catalog.PDHdfsUtils;
import com.powerdata.core.paimon.catalog.PDHiveUtils;
import com.powerdata.core.paimon.engine.PDPaimonSparkUtils;
import com.powerdata.core.paimon.PDPaimonUtils;
import com.powerdata.system.domain.*;
import com.powerdata.system.domain.param.PaimonSyncTaskParam;
import com.powerdata.system.mapper.PaimonCatalogMapper;
import com.powerdata.system.mapper.PaimonSyncTaskMapper;
import com.powerdata.system.paimon.PaimonSyncTaskService;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.util.ObjectUtils;

import javax.annotation.Resource;
import java.sql.*;
import java.time.Duration;
import java.util.*;
import java.util.Date;
import java.util.stream.Collectors;

/**
 * Paimon lake-ingestion (sync) task service: creates and tracks tasks that load
 * data from HDFS, Hive, Kafka or MySQL sources into Paimon tables.
 *
 * @author deeprado
 * @version 1.0
 * @description Paimon sync-task management and execution (HDFS/Hive/Kafka/MySQL sources)
 * @date 2023/8/29 16:09
 */
@Service
public class PaimonSyncTaskServiceImpl implements PaimonSyncTaskService {
    static Map<String, String> mysqlToPaimonMap = new HashMap<>();

    static {
        mysqlToPaimonMap.put("tinyint", "int");
        mysqlToPaimonMap.put("smallint", "int");
        mysqlToPaimonMap.put("mediumint", "int");
        mysqlToPaimonMap.put("int", "int");
        mysqlToPaimonMap.put("bigint", "long");
        mysqlToPaimonMap.put("float", "float");
        mysqlToPaimonMap.put("double", "double");
        mysqlToPaimonMap.put("decimal", "decimal");
        mysqlToPaimonMap.put("date", "date");
        mysqlToPaimonMap.put("time", "string");
        mysqlToPaimonMap.put("datetime", "timestamp");
        mysqlToPaimonMap.put("char", "string");
        mysqlToPaimonMap.put("varchar", "string");
        mysqlToPaimonMap.put("binary", "bytes");
        mysqlToPaimonMap.put("varbinary", "bytes");
        mysqlToPaimonMap.put("longbolb", "bytes");
        mysqlToPaimonMap.put("json", "string");
    }


    private static final Logger logger = LoggerFactory.getLogger(PaimonSyncTaskServiceImpl.class);

    @Value(value = "${paimonManager.hiveConf}")
    private String uploadHiveFilePath;
    @Value(value = "${paimonManager.hadoopUser}")
    private String hadoopUser;
    @Value(value = "${paimonManager.hiveUser}")
    private String hiveUser;

    @Value(value = "${paimonManager.nameservices}")
    private String nameServer;

    @Resource
    private PaimonSyncTaskMapper paimonSyncTaskMapper;

    @Resource
    private PaimonCatalogMapper paimonCatalogMapper;

    @Override
    public Map<String, Object> taskList(PaimonSyncTaskParam paimonSyncTaskParam) throws Exception {
        final HashMap<String, Object> result = new HashMap<>();
        int pageSize = ObjectUtils.isEmpty(paimonSyncTaskParam.getPageSize()) ? 10 : paimonSyncTaskParam.getPageSize();
        int pageNum = ObjectUtils.isEmpty(paimonSyncTaskParam.getPageNum()) ? 1 : paimonSyncTaskParam.getPageNum();
        String orderByColumn =
                StringUtils.isEmpty(paimonSyncTaskParam.getOrderByColumn()) ? "id" : paimonSyncTaskParam.getOrderByColumn();
        String isAsc = StringUtils.isEmpty(paimonSyncTaskParam.getIsAsc()) ? "desc" : paimonSyncTaskParam.getIsAsc();

        PaimonSyncTaskExample paimonSyncTaskExample = new PaimonSyncTaskExample();
        PaimonSyncTaskExample.Criteria criteria = paimonSyncTaskExample.createCriteria();

        if (StringUtils.isNotEmpty(paimonSyncTaskParam.getSourcetypes())) {
            criteria.andSourcetypesEqualTo(paimonSyncTaskParam.getSourcetypes());
        }

        if (StringUtils.isNotEmpty(paimonSyncTaskParam.getSourceip())) {
            criteria.andSourceipLike("%" + paimonSyncTaskParam.getSourceip() + "%");
        }

        if (StringUtils.isNotEmpty(paimonSyncTaskParam.getSyncdata())) {
            criteria.andSyncdataLike("%" + paimonSyncTaskParam.getSyncdata() + "%");
        }

        if (StringUtils.isNotEmpty(paimonSyncTaskParam.getDistcatalogid())) {
            criteria.andDistcatalogidLike("%" + paimonSyncTaskParam.getDistcatalogid() + "%");
        }

        if (StringUtils.isNotEmpty(paimonSyncTaskParam.getDistdatabase())) {
            criteria.andDistdatabaseLike("%" + paimonSyncTaskParam.getDistdatabase() + "%");
        }

        if (StringUtils.isNotEmpty(paimonSyncTaskParam.getDisttable())) {
            criteria.andDisttableLike("%" + paimonSyncTaskParam.getDisttable() + "%");
        }

        if (StringUtils.isNotEmpty(paimonSyncTaskParam.getStatus())) {
            criteria.andStatusEqualTo(paimonSyncTaskParam.getStatus());
        }

        if (StringUtils.isNotEmpty(paimonSyncTaskParam.getCreater())) {
            criteria.andCreaterLike("%" + paimonSyncTaskParam.getCreater() + "%");
        }

        List<PaimonSyncTask> paimonSyncTaskList = paimonSyncTaskMapper.selectByExample(paimonSyncTaskExample);

        if (ObjectUtils.isEmpty(paimonSyncTaskList) || paimonSyncTaskList.size() == 0) {
            result.put("total", 0);
            result.put("list", null);
            return result;
        }

        result.put("total", paimonSyncTaskList.size());
        paimonSyncTaskExample.setOrderByClause(" " + orderByColumn + " " + isAsc + " limit " + (pageNum - 1) * pageSize + "," + pageSize);
        List<PaimonSyncTask> resultList = paimonSyncTaskMapper.selectByExample(paimonSyncTaskExample)
                .stream().peek(task -> {
                    if ("0".equals(task.getStatus())) {
                        task.setExectimes(DateUtil.formatBetween(new Date(new Long(task.getCreatetime())), new Date()));
                    }
                }).collect(Collectors.toList());
        result.put("list", resultList);
        return result;
    }

    @Override
    public void add(PaimonSyncTask paimonSyncTask) throws Exception {
        String id = paimonSyncTask.getId();
        if (StringUtils.isNotEmpty(id)) {
            PaimonSyncTask paimonSyncTask1 = paimonSyncTaskMapper.selectByPrimaryKey(id);
            if ("0".equals(paimonSyncTask1.getStatus())) {
                throw new Exception("运行中任务不支持再次执行，请重新创建任务");
            }
        }
        paimonSyncTask.setCreater(SecurityUtils.getLoginUser().getUsername());
        paimonSyncTask.setCreatetime(new Date().getTime() + "");
        paimonSyncTask.setId(paimonSyncTask.getCreatetime());
        paimonSyncTask.setStatus("0");
        paimonSyncTask.setOther1("");
        paimonSyncTask.setExectimes("");
        paimonSyncTaskMapper.insert(paimonSyncTask);
        Thread syncTaskThread = new Thread(() -> {
            Date begin = new Date();
            try {
                execSyncTask(paimonSyncTask);
            } catch (Exception e) {
                paimonSyncTask.setStatus("1");
                paimonSyncTask.setOther1(e.getMessage().substring(0, Math.min(e.getMessage().length(), 2048)));
                paimonSyncTask.setExectimes(DateUtil.formatBetween(begin, new Date()));
                paimonSyncTaskMapper.updateByPrimaryKey(paimonSyncTask);
                e.printStackTrace();
            }
        });
        syncTaskThread.setPriority(1);
        syncTaskThread.setName(paimonSyncTask.getSourcetypes() + "_" + paimonSyncTask.getId());
        syncTaskThread.start();
    }

    @Override
    public void stopKafkaTask(PaimonSyncTask paimonSyncTask) throws Exception {
        PaimonSyncTask paimonSyncTask1 = paimonSyncTaskMapper.selectByPrimaryKey(paimonSyncTask.getId());
        Date begin = new Date(Long.parseLong(paimonSyncTask1.getCreatetime()));
        paimonSyncTask1.setStatus("3");
        paimonSyncTask1.setExectimes(DateUtil.formatBetween(begin, new Date()));
        paimonSyncTaskMapper.updateByPrimaryKey(paimonSyncTask1);
    }


    private void execSyncTask(PaimonSyncTask paimonSyncTask) throws Exception {
        int count = 0;
        String distCatalogId = paimonSyncTask.getDistcatalogid();
        PaimonCatalog paimonCatalog = paimonCatalogMapper.selectByPrimaryKey(distCatalogId);
        PaimonSyncTaskParam paimonSyncTaskParam =
                new PaimonSyncTaskParam(paimonCatalog.getTypes(), paimonCatalog.getHiveurl(),
                        paimonCatalog.getHdfsurl(), paimonSyncTask);
        Date begin = new Date();
        //TODO 入湖任务执行逻辑
        switch (paimonSyncTask.getSourcetypes()) {
            case "1":
                count = execSyncTaskOfHdfs(paimonSyncTaskParam);
                break;
            case "2":
                count = execSyncTaskOfHive(paimonSyncTaskParam);
                break;
            case "3":
                count = execSyncTaskOfKafka(paimonSyncTaskParam);
                break;
            case "4":
                count = execSyncTaskOfMysql(paimonSyncTaskParam);
                break;
            default:
                count = 0;
                break;
        }
        PaimonSyncTask paimonSyncTask1 = paimonSyncTaskMapper.selectByPrimaryKey(paimonSyncTask.getId());
        paimonSyncTask1.setExectimes(DateUtil.formatBetween(begin, new Date()));
        paimonSyncTask1.setOther1("总接入数据条数：" + count);
        if (!"3".equals(paimonSyncTask1.getSourcetypes())) {
            paimonSyncTask1.setStatus("2");
        }
        paimonSyncTaskMapper.updateByPrimaryKey(paimonSyncTask1);
    }

    private int execSyncTaskOfMysql(PaimonSyncTaskParam paimonSyncTaskParam) throws Exception {
        String syncdata = paimonSyncTaskParam.getSyncdata();
        String ip = paimonSyncTaskParam.getSourceip();
        String port = paimonSyncTaskParam.getOther2();
        String userName = paimonSyncTaskParam.getOther3();
        String password = paimonSyncTaskParam.getOther4();
        String catalogId = paimonSyncTaskParam.getDistcatalogid();
        String databaseName = paimonSyncTaskParam.getDistdatabase();
        String tableName = paimonSyncTaskParam.getDisttable();
        String types = paimonSyncTaskParam.getTypes();
        String hdfsUrl = paimonSyncTaskParam.getHdfsUrl();
        String hiveUrl = paimonSyncTaskParam.getHiveUrl();

        String url = "jdbc:mysql://" + ip + ":" + port;
        Class.forName("com.mysql.cj.jdbc.Driver");
        Connection connection = null;
        String connectCheck = "Too many connections";
        while ("Too many connections".equals(connectCheck)) {
            try {
                connection = DriverManager.getConnection(url, userName, password);
                connectCheck = "OK";
            } catch (Exception e) {
                Thread.sleep(5000);
                if (e.getMessage().contains("Too many connections")) {
                    connectCheck = "Too many connections";
                } else {
                    connectCheck = e.getMessage();
                }
            }
        }
        try {
            PDPaimonUtils pdPaimonUtils = PDPaimonUtils.build(catalogId, types, hiveUrl, hdfsUrl, hadoopUser, uploadHiveFilePath);
            if (!pdPaimonUtils.tableExists(databaseName, tableName)) {
                String createTableSqlStr =
                        mysqlToPaimonSqlOfCreateTableSql(Objects.requireNonNull(connection), syncdata, catalogId, databaseName, tableName);
                logger.info("execSyncTaskOfMysql_createTableSql:" + createTableSqlStr);
                PDPaimonSparkUtils.build(catalogId, types, hiveUrl, hdfsUrl, null, hadoopUser)
                        .executeSql(createTableSqlStr);
            }
            return pdPaimonUtils.syncMysqlToPaimonTable(Objects.requireNonNull(connection), syncdata, databaseName, tableName);
        } catch (Exception e) {
            e.printStackTrace();
            throw new Exception(e.getMessage());
        } finally {
            Objects.requireNonNull(connection).close();
        }
    }

    @Override
    public void batchAddHiveTask(PaimonSyncTask paimonSyncTask) throws Exception {
        String sourceIp = paimonSyncTask.getSourceip();
        String syncdata = paimonSyncTask.getSyncdata();
        List<String> allTableNames = PDHiveUtils.build(sourceIp, hiveUser).getAllTableNames(syncdata);
        for (String tableName : allTableNames) {
            PaimonSyncTask paimonSyncTask1 = new PaimonSyncTask();
            BeanUtils.copyBeanProp(paimonSyncTask1, paimonSyncTask);
            paimonSyncTask1.setSyncdata(paimonSyncTask.getSyncdata().split("\\.")[0] + "." + tableName);
            paimonSyncTask1.setDisttable(tableName);
            paimonSyncTask1.setId("");
            add(paimonSyncTask1);
        }
    }

    @Override
    public void batchAddMySqlTask(PaimonSyncTask paimonSyncTask) throws Exception {
        String syncdata = paimonSyncTask.getSyncdata();
        String ip = paimonSyncTask.getSourceip();
        String port = paimonSyncTask.getOther2();
        String userName = paimonSyncTask.getOther3();
        String password = paimonSyncTask.getOther4();
        String url = "jdbc:mysql://" + ip + ":" + port;
        Class.forName("com.mysql.cj.jdbc.Driver");
        Connection connection = DriverManager.getConnection(url, userName, password);
        Statement statement = connection.createStatement();
        try {
            statement.execute("use " + syncdata);
            ResultSet resultSet = statement.executeQuery("show tables");
            while (resultSet.next()) {
                String tableName = resultSet.getString(1);
                PaimonSyncTask paimonSyncTask1 = new PaimonSyncTask();
                BeanUtils.copyBeanProp(paimonSyncTask1, paimonSyncTask);
                paimonSyncTask1.setSyncdata(paimonSyncTask.getSyncdata().split("\\.")[0] + "." + tableName);
                paimonSyncTask1.setDisttable(tableName);
                paimonSyncTask1.setId("");
                add(paimonSyncTask1);
            }
            resultSet.close();
        } catch (Exception e) {
            e.printStackTrace();
            throw new Exception(e.getMessage());
        } finally {
            statement.close();
            connection.close();
        }
    }

    private int execSyncTaskOfKafka(PaimonSyncTaskParam paimonSyncTaskParam) throws Exception {
        String id = paimonSyncTaskParam.getId();
        String kafkaIp = paimonSyncTaskParam.getSourceip();
        String groupId = paimonSyncTaskParam.toString();
        String topicName = paimonSyncTaskParam.getSyncdata();

        String catalogId = paimonSyncTaskParam.getDistcatalogid();
        String types = paimonSyncTaskParam.getTypes();
        String hdfsUrl = paimonSyncTaskParam.getHdfsUrl();
        String hiveUrl = paimonSyncTaskParam.getHiveUrl();
        String distDB = paimonSyncTaskParam.getDistdatabase();
        String distTBL = paimonSyncTaskParam.getDisttable();
        String splitStr = paimonSyncTaskParam.getOther2();
        String dataType = paimonSyncTaskParam.getOther1();

        PDPaimonUtils pdPaimonUtils = PDPaimonUtils.build(catalogId, types, hiveUrl, hdfsUrl, hadoopUser, uploadHiveFilePath);

        Properties properties = new Properties();
        properties.put("bootstrap.servers", kafkaIp + ":9092");
        properties.put("group.id", groupId);
        properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("auto.offset.reset", "earliest");
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
        kafkaConsumer.subscribe(Collections.singletonList(topicName));
        int count = 0;
        while (!"3".equals(paimonSyncTaskMapper.selectByPrimaryKey(id).getStatus())) {
            logger.info("kafka入湖任务[" + id + "]执行中：" + new Date() + "");
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(5000));
            count += records.count();
            logger.info("kafka入湖任务[" + id + "]执行中：" + records.count());
            ArrayList<String> recordArr = new ArrayList<>();
            for (ConsumerRecord<String, String> record : records) {
                recordArr.add(record.value());
            }
            if (recordArr.size() > 0) {
                if ("json".equals(dataType)) {
                    pdPaimonUtils.addTableDataOfJson(distDB, distTBL, recordArr);
                } else {
                    pdPaimonUtils.addTableData(distDB, distTBL, recordArr, splitStr);
                }
            }
            if (records.count() > 0) {
                PaimonSyncTask paimonSyncTask = paimonSyncTaskMapper.selectByPrimaryKey(id);
                paimonSyncTask.setOther1("接入数据条数：" + count);
                paimonSyncTaskMapper.updateByPrimaryKey(paimonSyncTask);
            }
        }
        kafkaConsumer.close(Duration.ofMillis(1000));
        return count;
    }

    private int execSyncTaskOfHdfs(PaimonSyncTaskParam paimonSyncTaskParam) throws Exception {
        String sourceIP = paimonSyncTaskParam.getSourceip();
        String syncdata = paimonSyncTaskParam.getSyncdata();
        String catalogId = paimonSyncTaskParam.getDistcatalogid();
        String types = paimonSyncTaskParam.getTypes();
        String hdfsUrl = paimonSyncTaskParam.getHdfsUrl();
        String hiveUrl = paimonSyncTaskParam.getHiveUrl();
        String distDB = paimonSyncTaskParam.getDistdatabase();
        String distTBL = paimonSyncTaskParam.getDisttable();
        String splitStr = paimonSyncTaskParam.getOther2();
        PDHdfsUtils pdHdfsUtils =
                PDHdfsUtils.build(sourceIP, "hdfs://" + sourceIP + ":8020", hadoopUser);
        return PDPaimonUtils.build(catalogId, types, hiveUrl, hdfsUrl, hadoopUser, uploadHiveFilePath)
                .syncHDFSToPaimonTable(pdHdfsUtils, syncdata, splitStr, distDB, distTBL);
    }

    private int execSyncTaskOfHive(PaimonSyncTaskParam paimonSyncTaskParam) throws Exception {
        String sourceIP = paimonSyncTaskParam.getSourceip();
        String syncdata = paimonSyncTaskParam.getSyncdata();
        String nodeIP = paimonSyncTaskParam.getOther2();
        Map<String, String> tableMessage = PDHiveUtils.build(sourceIP, hiveUser).getTableMessage(syncdata);
        String tablePath =
                tableMessage.get("filePath").replace(nameServer, nodeIP + ":8020").split("8020")[1];
        String splitStr = tableMessage.get("splitStr");
        String catalogId = paimonSyncTaskParam.getDistcatalogid();
        String types = paimonSyncTaskParam.getTypes();
        String hdfsUrl = paimonSyncTaskParam.getHdfsUrl();
        String hiveUrl = paimonSyncTaskParam.getHiveUrl();
        String distDB = paimonSyncTaskParam.getDistdatabase();
        String distTBL = paimonSyncTaskParam.getDisttable();
        String createSql = tableMessage.get("createSql").replace(syncdata, distTBL);
        PDHdfsUtils pdHdfsUtils =
                PDHdfsUtils.build(sourceIP, "hdfs://" + nodeIP + ":8020", hadoopUser);
        PDPaimonSparkUtils pdPaimonSparkUtils =
                PDPaimonSparkUtils.build(catalogId, types, hiveUrl, hdfsUrl, null, hadoopUser);
        pdPaimonSparkUtils.executeSql("use `" + catalogId + "`");
        pdPaimonSparkUtils.executeSql("use `" + distDB + "`");
        boolean isFlag = true;
        for (Map<String, Object> tables : pdPaimonSparkUtils.executeSql("show tables")) {
            if (distTBL.equals(tables.get("tableName"))) {
                isFlag = false;
                break;
            }
        }
        if (isFlag) {
            pdPaimonSparkUtils.executeSql(createSql);
        }
        return PDPaimonUtils.build(catalogId, types, hiveUrl, hdfsUrl, hadoopUser, uploadHiveFilePath)
                .syncHiveToPaimonTable(pdHdfsUtils, tablePath, splitStr, distDB, distTBL);
    }

    private String mysqlToPaimonSqlOfCreateTableSql(Connection connection, String mysqlTable,
                                                    String catalogId, String databaseName, String tableName) throws Exception {
        StringBuilder paimonCreateTableSql = new StringBuilder("create table " + catalogId + "." + databaseName + "." + tableName + " (");
        Statement statement = connection.createStatement();
        ResultSet resultSet = statement.executeQuery("show create table " + mysqlTable);
        String sql = "";
        while (resultSet.next()) {
            sql = resultSet.getString("Create Table");
        }
        resultSet.close();
        statement.close();

        List<String> sqlList = Arrays.asList(sql.toLowerCase().split("\n"));
        List<String> newCols = sqlList.stream().map(String::trim).filter(col -> col.startsWith("`")).map(col -> {
            String newCol = "";
            String[] comments = col.split(" comment ");
            String[] colMessageArr = comments[0].trim().split(" ");
            String[] types = colMessageArr[1].split("\\(");
            String colType = mysqlToPaimonMap.get(types[0]);
            if (colType == null) {
                colType = "string";
            }
            newCol = colMessageArr[0] + " " + colType;
            if (comments.length == 2) {
                if (!comments[1].trim().endsWith(",")) {
                    comments[1] = comments[1] + ",";
                }
                newCol = newCol + " comment " + comments[1];
            } else {
                newCol = newCol + ",";
            }
            return newCol;
        }).collect(Collectors.toList());
        for (String newCol : newCols) {
            paimonCreateTableSql.append(newCol);
        }
        return paimonCreateTableSql.substring(0, paimonCreateTableSql.length() - 1) + " ) using paimon";
    }

}
