package com.ideal.lx_mss.service.impl;


import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.ideal.lx_mss.common.MssLogs;
import com.ideal.lx_mss.common.RedisCache;
import com.ideal.lx_mss.common.TableInfo;
import com.ideal.lx_mss.common.entity.KafkaTableEntity;
import com.ideal.lx_mss.common.entity.KafkaTopicEntity;
import com.ideal.lx_mss.common.entity.MssKafkaEntity;
import com.ideal.lx_mss.entity.KafkaTopicTableEntity;
import com.ideal.lx_mss.entity.PullFileEntity;
import com.ideal.lx_mss.entity.PullInfoEntity;
import com.ideal.lx_mss.mapper.*;
import com.ideal.lx_mss.service.KafkaDataService;
import com.ideal.lx_mss.utils.*;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.*;
import java.util.concurrent.*;

@Service
public class KafkaDataServiceImpl implements KafkaDataService {

    @Autowired
    private KafkaTopicMapper topicMapper;

    @Autowired
    private KafkaPullConsumerService kafkaPullConsumerService;

    @Autowired
    private RedisCache redisCache;

    @Autowired
    private AnalysisUtils analysisUtils;

    @Autowired
    private CsvUtils csvUtils;

    @Autowired
    private TextUtils textUtils;

    @Autowired
    private BasicMapper basicMapper;

    @Autowired
    private MssLogsMapper mssLogsMapper;

    @Autowired
    private PullFileMapper pullFileMapper;

    @Autowired
    private SqlUtils sqlUtils;

    @Autowired
    private KafkaTopicTableMapper kafkaTopicTableMapper;

    /**
     * Returns the list of configured Kafka topics.
     *
     * @return all topic records known to the topic mapper
     */
    @Override
    public List<KafkaTopicEntity> getKafkaTopicList() {
        return topicMapper.getKafkaTopicList();
    }

    /**
     * Queries Kafka pull records matching the given criteria (paging is handled
     * by the mapper/caller, e.g. via PageHelper — see mapper for details).
     *
     * @param mssKafkaEntity filter criteria (topic, etc.)
     * @return matching records
     */
    @Override
    public List<MssKafkaEntity> selectKafkaList(MssKafkaEntity mssKafkaEntity) {
        return topicMapper.selectKafkaList(mssKafkaEntity);
    }

    /**
     * Pulls data for one topic from Kafka, analyses the produced pull file,
     * promotes clean temp tables to the formal tables (full refresh), and
     * returns per-table row counts for the topic.
     *
     * @param mssKafkaEntity carries the topic to pull
     * @return map with "topic" and, on a successful pull, "tables" — a list of
     *         {tableName -> rowCount} entries for tables bound to the topic
     * @throws IllegalStateException if the pull succeeded but no "info" pull
     *         file record exists for this run
     */
    @Override
    public HashMap<String, Object> pullKafkaDataByTopic(MssKafkaEntity mssKafkaEntity) throws ExecutionException, InterruptedException {

        // Correlation id for this pull run; ties together logs, pull files and temp tables.
        String formId = CommonUtil.createFormId();

        // Audit log entry for the incoming pull request.
        MssLogs mssLogs = new MssLogs(UUID.randomUUID().toString(), "pullKafkaDataByTopic", "KafkaDataService", "info", mssKafkaEntity.getTopic(), "", "访问拉取接口", CommonUtil.getNowDate(), formId);
        mssLogsMapper.insert(mssLogs);

        // Pull the topic's messages from Kafka.
        boolean isPull = kafkaPullConsumerService.pullMessages(mssKafkaEntity.getTopic(), formId);

        HashMap<String, Object> result = new HashMap<>();
        result.put("topic", mssKafkaEntity.getTopic());

        if (isPull) {
            // Locate the "info" pull file produced by this run.
            QueryWrapper<PullFileEntity> fileQuery = new QueryWrapper<>();
            fileQuery.eq("form_id", formId).eq("type", "info");
            PullFileEntity pullFileEntity = pullFileMapper.selectOne(fileQuery);
            // FIX: selectOne may return null; the original code would NPE on getFileUrl().
            if (pullFileEntity == null) {
                throw new IllegalStateException(
                        "No info pull file found for formId=" + formId + ", topic=" + mssKafkaEntity.getTopic());
            }

            // Structure analysis (includes the table-structure comparison step).
            analysisUtils.structureAnalysisByFile(formId, pullFileEntity.getFileUrl(), mssKafkaEntity.getTopic());

            // Tables that failed analysis must NOT be promoted to the formal tables.
            Set<String> errTables = basicMapper.getTableNameTempSet(formId);
            QueryWrapper<KafkaTopicTableEntity> promoteQuery = new QueryWrapper<>();
            // FIX: notIn(...) with an empty collection generates invalid SQL ("NOT IN ()");
            // with no errored tables, every table is eligible for promotion.
            if (errTables != null && !errTables.isEmpty()) {
                promoteQuery.notIn("table_name", errTables);
            }
            List<KafkaTopicTableEntity> promotable = kafkaTopicTableMapper.selectList(promoteQuery);

            // Full-refresh promotion: truncate each formal table, then copy from its temp table.
            for (KafkaTopicTableEntity entity : promotable) {
                basicMapper.truncateTables(entity.getTableName());
                // Formal-table column list drives the INSERT ... SELECT statement.
                String[] tableField = basicMapper.getTableField("mss_kafka", entity.getTableName());
                String sql = sqlUtils.buildDevelopInsertSelectQuery(
                        "mss_kafka", entity.getTempTableName(), entity.getTableName(), tableField);
                basicMapper.insertTableUtilsMapper(sql);
            }

            // Report row counts for the tables bound to this topic.
            QueryWrapper<KafkaTopicTableEntity> topicQuery = new QueryWrapper<>();
            topicQuery.eq("topic_name", mssKafkaEntity.getTopic());
            // FIX: the original passed the promotion wrapper here by mistake (copy-paste),
            // so the topic_name filter built above was never applied.
            List<KafkaTopicTableEntity> topicTables = kafkaTopicTableMapper.selectList(topicQuery);

            ArrayList<HashMap<String, Object>> tableCounts = new ArrayList<>();
            for (KafkaTopicTableEntity entity : topicTables) {
                int count = basicMapper.selectCount(entity.getTableName());
                HashMap<String, Object> tableInfo = new HashMap<>();
                tableInfo.put(entity.getTableName(), count);
                tableCounts.add(tableInfo);
            }
            result.put("tables", tableCounts);
        }

        return result;
    }

    /**
     * Marks (tags) the Kafka data matching the given criteria.
     *
     * <p>NOTE(review): the marking logic is unimplemented — this currently only
     * parses each message and prints its "datatype" object.
     *
     * @param mssKafkaEntity filter criteria selecting the records to mark
     */
    @Override
    public void mssKafkaDataMark(MssKafkaEntity mssKafkaEntity) {
        // Fetch the records for the requested topic/criteria.
        List<MssKafkaEntity> list = topicMapper.selectKafkaList(mssKafkaEntity);
        if (!list.isEmpty()) {
            for (MssKafkaEntity kafkaEntity : list) {
                // Parse the raw message string into JSON.
                JSONObject jsonObject = new JSONObject(kafkaEntity.getMesValue());
                // "datatype" == DICT marks dictionary data, anything else is row data;
                // "itfcode" carries the target table name.
                JSONObject datatype = jsonObject.getJSONObject("datatype");
                // TODO(review): replace with real marking logic / proper logging.
                System.out.println(datatype);
            }
        }
    }

    /**
     * Pulls every configured topic in parallel (fixed pool of 10 workers,
     * topics partitioned evenly with the remainder on the last worker), then
     * refreshes each topic's cached list length onto its entity.
     *
     * @return the topic list with counts populated from redis
     * @throws InterruptedException if interrupted while awaiting completion
     */
    @Override
    public List<KafkaTopicEntity> allPullKafkaDataByTopic() throws InterruptedException {
        List<KafkaTopicEntity> kafkaTopicList = topicMapper.getKafkaTopicList();

        int threadNum = 10;
        int dataSize = kafkaTopicList.size();
        int pageSize = dataSize / threadNum; // per-worker share; last worker takes the remainder

        ExecutorService executorService = Executors.newFixedThreadPool(threadNum);
        List<Future<Integer>> results = new ArrayList<>();

        for (int i = 0; i < threadNum; i++) {
            int startIndex = i * pageSize;
            int endIndex = (i == threadNum - 1) ? dataSize : (i + 1) * pageSize;
            List<KafkaTopicEntity> subList = kafkaTopicList.subList(startIndex, endIndex);

            Callable<Integer> callable = new Callable<Integer>() {
                @Override
                public Integer call() throws Exception {
                    for (KafkaTopicEntity kafkaTopicEntity : subList) {
                        boolean isPull = kafkaPullConsumerService.pullMessages(kafkaTopicEntity.getTopic(), null);
                        // NOTE(review): `list` is always empty here, so the redis swap below is
                        // dead code and `isPull` is never consulted; presumably the list should
                        // be populated from the pull result — confirm with the original author.
                        List<HashMap<String, Object>> list = new ArrayList<>();
                        if (!list.isEmpty()) {
                            // Stage under a timestamped key, then swap it in for the topic key.
                            String topic = String.format("%s_%s", kafkaTopicEntity.getTopic(), System.currentTimeMillis());
                            for (Map<String, Object> map : list) {
                                redisCache.pushList(topic, map);
                            }
                            redisCache.deleteObject(kafkaTopicEntity.getTopic());
                            redisCache.updateKey(topic, kafkaTopicEntity.getTopic());
                        }
                    }
                    return 0;
                }
            };
            results.add(executorService.submit(callable));
        }

        executorService.shutdown();
        // FIX: replaces a 100ms busy-wait poll. The original also skipped the count
        // refresh entirely when all tasks finished before the first isTerminated() check.
        if (!executorService.awaitTermination(1, TimeUnit.HOURS)) {
            executorService.shutdownNow();
        }

        // Single pass after completion: record each topic's cached list length.
        for (KafkaTopicEntity kafkaTopicEntity : kafkaTopicList) {
            kafkaTopicEntity.setCount(redisCache.getListLenght(kafkaTopicEntity.getTopic()));
        }

        return kafkaTopicList;
    }
}
