package com.risk.kernel.service.impl;

import com.risk.common.core.domain.AjaxResult;
import com.risk.common.utils.StringUtils;
import com.risk.kernel.config.GmfKeFuTestJdbcConfig;
import com.risk.kernel.domain.KfConfig;
import com.risk.kernel.domain.KfDataOperate;
import com.risk.kernel.domain.KfDataOperateTask;
import com.risk.kernel.mapper.KfConfigMapper;
import com.risk.kernel.mapper.KfDataOperateMapper;
import com.risk.kernel.mapper.KfDataOperateTaskMapper;
import com.risk.kernel.service.IKfDataOperateService;
import com.risk.kernel.service.JdbcSqlBuildService;
import com.risk.kernel.utils.DateUtil;
import com.risk.kernel.utils.ExceptionUtil;
import com.risk.kernel.utils.RandomUtil;
import com.risk.kernel.vo.DataImportVo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

import javax.annotation.Resource;
import java.sql.SQLException;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.stream.Collectors;

/**
 * @author: Liwen
 * @createTime: 2024/11/24
 * @description:
 */
@Service
@Slf4j
public class KfDataOperateServiceImpl implements IKfDataOperateService {

    @Autowired
    private KfDataOperateMapper kfDataOperateMapper;

    @Autowired
    private KfConfigMapper kfConfigMapper;

    /** JdbcTemplate bound to the Hive data source (the read side of the import). */
    @Resource(name = "hiveJdbcTemplate")
    private JdbcTemplate hiveJdbcTemplate;

    @Autowired
    private JdbcSqlBuildService jdbcSqlBuildService;

    /** Connection holder for the MySQL test environment (the write side of the import). */
    @Autowired
    private GmfKeFuTestJdbcConfig gmfKeFuTestJdbcConfig;

    @Autowired
    private KfDataOperateTaskMapper kfDataOperateInfoMapper;

    /**
     * Lists operate records, optionally filtered by mobile number.
     *
     * @param mobile mobile-number filter; null/empty handling is delegated to the mapper
     * @return matching records
     */
    @Override
    public List<KfDataOperate> selectKfDataOperateList(String mobile) {
        return kfDataOperateMapper.selectKfDataOperateList(mobile);
    }

    /**
     * Batch-deletes master operate records together with their child task rows.
     *
     * @param ids primary keys of the master records
     * @return number of master rows deleted
     */
    @Override
    public int deleteKfDataOperateByIds(Long[] ids) {
        // Remove child task rows first so no orphaned tasks are left behind.
        kfDataOperateInfoMapper.deleteKfDataOperateInfoByIds(ids);
        return kfDataOperateMapper.deleteKfDataOperateByIds(ids);
    }

    /**
     * Copies one user's data from Hive into the MySQL test environment.
     * <p>
     * Resolves the userId from the mobile number, loads the configured table list,
     * then for each table: records a child task, queries Hive, deletes existing
     * test-env rows by the configured key, and inserts the Hive rows. The whole run
     * is tracked in a master record whose flag is "0" running / "1" success /
     * "2" failed; each child task status is "0" running / "1" success / "2" failed /
     * "3" empty Hive result. Aborts on the first table that fails.
     *
     * @param dataImportVo import request (mobile number, optional model/table filters)
     * @return per-table result map on success, or a 400 error describing the failing table
     */
    @Override
    public AjaxResult hiveDataImportToMysql(DataImportVo dataImportVo) {
        ZonedDateTime startZdt = ZonedDateTime.now();
        Map<String, Object> resultMap = new LinkedHashMap<>();

        dataImportVo = queryUserId(dataImportVo);
        if (StringUtils.isEmpty(dataImportVo.getUserId())) {
            return AjaxResult.error(400, "userId未查询到");
        }

        // Tables configured for this module.
        List<KfConfig> kfConfigTableList = getKfConfigTableList(dataImportVo);
        if (CollectionUtils.isEmpty(kfConfigTableList)) {
            return AjaxResult.error(400, "kf_config表查询=null");
        }
        log.info("kfConfigTableList: {}", kfConfigTableList);

        int num = 0;
        // Master record for this run (flag "0" = running).
        KfDataOperate kfDataOperate = initOperateRecord(dataImportVo);
        try {
            for (KfConfig kfConfig : kfConfigTableList) {
                num++;
                KfDataOperateTask task = initTask(kfDataOperate, kfConfig, num);
                log.info("第{}次hive查询：{}", num, kfConfig.getDbName() + "." + kfConfig.getTableName());
                List<Map<String, Object>> hiveResultList = queryHiveList(kfConfig.getQuerySql());
                String key = num + "、" + kfConfig.getDbName() + "." + kfConfig.getTableName();
                String msg = "success, size: " + hiveResultList.size();
                if (CollectionUtils.isEmpty(hiveResultList)) {
                    msg = "hive查询 = null";
                    resultMap.put(key, msg);
                    task.setContent(msg);
                    task.setExecuteStatus("3"); // "3" = nothing to import for this table
                    updateOperateInfoStatus(task);
                    continue;
                }

                // Collect the delete-key values present in the Hive result.
                List<String> deleteValues;
                try {
                    deleteValues = getDeleteValues(kfConfig.getDeleteKey(), hiveResultList);
                } catch (Exception e) {
                    log.error("extract delete values failed, deleteKey={}", kfConfig.getDeleteKey(), e);
                    msg = "根据key：" + kfConfig.getDeleteKey() + ",获取value失败了！" + ExceptionUtil.handleExceptionFormat(e);
                    taskAndOperateRecordException(msg, kfDataOperate, task,
                            DateUtil.getTimeConsuming(startZdt, ZonedDateTime.now()));
                    return AjaxResult.error(400, key + msg);
                }

                // Clear matching rows from the test environment before inserting.
                try {
                    deleteDataKeys(kfConfig.getDbName(), kfConfig.getTableName(), kfConfig.getDeleteKey(), deleteValues);
                } catch (SQLException e) {
                    log.error("delete from test env failed: {}.{}", kfConfig.getDbName(), kfConfig.getTableName(), e);
                    msg = "删除测试环境失败了！" + ExceptionUtil.handleExceptionFormat(e);
                    taskAndOperateRecordException(msg, kfDataOperate, task,
                            DateUtil.getTimeConsuming(startZdt, ZonedDateTime.now()));
                    return AjaxResult.error(400, key + msg);
                }

                // Insert the Hive rows row-by-row.
                try {
                    dataAdd(hiveResultList, kfConfig);
                    resultMap.put(key, msg);
                } catch (SQLException e) {
                    log.error("insert into test env failed: {}.{}", kfConfig.getDbName(), kfConfig.getTableName(), e);
                    msg = "插入mysql失败了！" + ExceptionUtil.handleExceptionFormat(e);
                    taskAndOperateRecordException(msg, kfDataOperate, task,
                            DateUtil.getTimeConsuming(startZdt, ZonedDateTime.now()));
                    return AjaxResult.error(400, key + msg);
                }

                task.setContent(msg);
                task.setExecuteStatus("1"); // "1" = table imported successfully
                updateOperateInfoStatus(task);
            }
        } finally {
            // Previously only closed on the success path, leaking the test-env
            // connection on every error return; always release it now.
            gmfKeFuTestJdbcConfig.close();
        }

        log.info("【操作完成】===========================================");
        resultMap.forEach((k, v) -> log.info("{}: {}", k, v));

        // Mark the master record as succeeded and record the total elapsed time.
        kfDataOperate.setFlag("1");
        updateDataOperateFlag(kfDataOperate, DateUtil.getTimeConsuming(startZdt, ZonedDateTime.now()));
        return AjaxResult.success(resultMap);
    }

    /**
     * Creates and persists a child task row in "running" state for one table import.
     *
     * @param operate  master record the task belongs to
     * @param kfConfig table configuration being processed
     * @param num      1-based processing order, stored as sortBy
     * @return the persisted task
     */
    public KfDataOperateTask initTask(KfDataOperate operate, KfConfig kfConfig, Integer num) {
        KfDataOperateTask task = new KfDataOperateTask();
        task.setPid(operate.getId());
        task.setMobile(operate.getMobile());
        task.setDbName(kfConfig.getDbName());
        task.setTableName(kfConfig.getTableName());
        task.setExecuteStatus("0"); // "0" = running
        task.setSortBy(num);
        task.setQuerySql(kfConfig.getQuerySql());
        task.setDeleteKey(kfConfig.getDeleteKey());
        addOperateInfo(task);
        return task;
    }

    /**
     * Failure handling: marks the master record and the current child task as failed.
     *
     * @param msg     failure description stored on the child task
     * @param operate master record (flag set to "2" = failed)
     * @param task    child task (status set to "2" = failed)
     * @param consume elapsed-time string recorded on the master record
     */
    public void taskAndOperateRecordException(String msg, KfDataOperate operate, KfDataOperateTask task, String consume) {
        operate.setFlag("2");
        updateDataOperateFlag(operate, consume);

        task.setContent(msg);
        task.setExecuteStatus("2");
        updateOperateInfoStatus(task);
    }

    /**
     * Persists the child task's current status and content.
     *
     * @param kfDataOperateTask task to update
     */
    public void updateOperateInfoStatus(KfDataOperateTask kfDataOperateTask) {
        kfDataOperateInfoMapper.updateOperateInfo(kfDataOperateTask);
    }

    /**
     * Inserts a child task record.
     *
     * @param kfDataOperateTask task to insert
     * @return the same task instance (possibly populated by the mapper)
     */
    public KfDataOperateTask addOperateInfo(KfDataOperateTask kfDataOperateTask) {
        kfDataOperateInfoMapper.insertKfDataOperateInfo(kfDataOperateTask);
        return kfDataOperateTask;
    }

    /**
     * Persists the master record's flag plus its end time and elapsed time.
     *
     * @param kfDataOperate master record (flag already set by the caller)
     * @param consume       elapsed-time string
     */
    public void updateDataOperateFlag(KfDataOperate kfDataOperate, String consume) {
        kfDataOperate.setUpdateTime(new Date());
        kfDataOperate.setConsume(consume);
        kfDataOperateMapper.updateKfDataOperate(kfDataOperate);
    }

    /**
     * Creates and persists the master execution record for a run, flag "0" (running).
     *
     * @param dataImportVo import request; a missing modelName defaults to "0"
     * @return the persisted master record
     */
    public KfDataOperate initOperateRecord(DataImportVo dataImportVo) {
        if (StringUtils.isEmpty(dataImportVo.getModelName())) {
            dataImportVo.setModelName("0");
        }
        KfDataOperate kfDataOperate = new KfDataOperate();
        kfDataOperate.setId(RandomUtil.getRandomNumber());
        kfDataOperate.setMobile(dataImportVo.getMobileNo());
        kfDataOperate.setModelName(dataImportVo.getModelName());
        kfDataOperate.setFlag("0"); // "0" = running
        kfDataOperate.setCreateBy(dataImportVo.getCreateBy());
        kfDataOperateMapper.insertKfDataOperate(kfDataOperate);
        return kfDataOperate;
    }

    /**
     * Inserts the Hive rows into the configured MySQL test-environment table,
     * one INSERT per row.
     *
     * @param hiveResultList rows read from Hive (column name → value)
     * @param kfConfig       target database/table configuration
     * @throws SQLException if any insert fails
     */
    public void dataAdd(List<Map<String, Object>> hiveResultList, KfConfig kfConfig) throws SQLException {
        for (Map<String, Object> row : hiveResultList) {
            gmfKeFuTestJdbcConfig.execAction(kfConfig.getDbName(), jdbcSqlBuildService.toInsert(kfConfig.getTableName(), row));
        }
    }

    /**
     * Deletes rows from the MySQL test environment whose {@code key} column matches
     * any of the given values.
     *
     * @param dbName    target database
     * @param tableName target table
     * @param key       column used in the delete predicate
     * @param valueList values to delete by
     * @return affected-row count reported by the executor
     * @throws SQLException if the delete fails
     */
    public int deleteDataKeys(String dbName, String tableName, String key, List<String> valueList) throws SQLException {
        log.info("【开始删除mysql测试环境：{}.{}】", dbName, tableName);
        String sql = jdbcSqlBuildService.toDelete(tableName, key, valueList);
        return gmfKeFuTestJdbcConfig.execAction(dbName, sql);
    }

    /**
     * Extracts the distinct values of {@code key} from the Hive result rows.
     * A missing or null value is rendered as the string "null", matching the
     * previous string-concatenation behavior.
     *
     * @param key            column name to extract
     * @param hiveResultList rows read from Hive
     * @return distinct stringified values, in first-seen order
     */
    public List<String> getDeleteValues(String key, List<Map<String, Object>> hiveResultList) {
        return hiveResultList.stream()
                .map(row -> String.valueOf(row.get(key)))
                .distinct()
                .collect(Collectors.toList());
    }

    /**
     * Runs a query against Hive and returns the result rows.
     *
     * @param sql fully substituted query (see {@link #getKfConfigTableList})
     * @return result rows (column name → value)
     */
    public List<Map<String, Object>> queryHiveList(String sql) {
        log.info("【hive查询sql】:{}", sql);
        return hiveJdbcTemplate.queryForList(sql);
    }

    /**
     * Loads the table configurations for the requested module and substitutes the
     * {@code ${mobile_no}} / {@code ${user_id}} placeholders in each query.
     *
     * @param dataImportVo request carrying the module, optional table filter, and ids
     * @return configurations with ready-to-run query SQL
     */
    public List<KfConfig> getKfConfigTableList(DataImportVo dataImportVo) {
        if (StringUtils.isNotEmpty(dataImportVo.getTableName())) {
            // Comma-separated table filter from the caller.
            String[] split = dataImportVo.getTableName().split(",");
            dataImportVo.setTabList(new ArrayList<>(Arrays.asList(split)));
        }
        List<KfConfig> list = kfConfigMapper.getKfConfigTableList(dataImportVo);
        if (!CollectionUtils.isEmpty(list)) {
            for (KfConfig item : list) {
                // String.replace substitutes literally; the previous replaceAll treated
                // the value as a regex replacement, so a "$" in the data would throw
                // or corrupt the generated SQL.
                item.setQuerySql(item.getQuerySql()
                        .replace("${mobile_no}", dataImportVo.getMobileNo())
                        .replace("${user_id}", dataImportVo.getUserId()));
            }
        }
        return list;
    }

    /**
     * Resolves the userId (and canonical mobile number) for the request's mobile
     * number / id number via the Hive passport tables.
     * <p>
     * Uses a parameterized query: the previous version concatenated the caller's
     * input into the SQL string, which allowed SQL injection. When no user matches,
     * the vo is returned unchanged so the caller reports "userId未查询到" instead of
     * surfacing an unhandled exception.
     *
     * @param dataImportVo request carrying the mobile number
     * @return the same vo with userId/mobileNo populated when a user was found
     */
    public DataImportVo queryUserId(DataImportVo dataImportVo) {
        String mobileNo = dataImportVo.getMobileNo();
        String sql = "select u.id,u.mobile_no from passport.t_user as u left join passport.t_customer c on u.id= c.user_id where c.id_no = ? or u.mobile_no = ?";
        log.info("【开始查询userId:{}】=================================", sql);
        Map<String, Object> map;
        try {
            map = hiveJdbcTemplate.queryForMap(sql, mobileNo, mobileNo);
        } catch (EmptyResultDataAccessException e) {
            log.warn("no passport user found for mobileNo={}", mobileNo);
            return dataImportVo;
        }
        log.info("【userId:{}】==============================================", map);
        String userId = (String) map.get("id");
        String mobile = (String) map.get("mobile_no");
        if (StringUtils.isNotEmpty(userId)) {
            dataImportVo.setUserId(userId);
        }
        if (StringUtils.isNotEmpty(mobile)) {
            dataImportVo.setMobileNo(mobile);
        }
        return dataImportVo;
    }

}