package com.itc.bi.service.impl;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.itc.bi.constant.KafkaConstant;
import com.itc.bi.constant.RedisConstant;
import com.itc.bi.dto.kafka.DataSetDTO;
import com.itc.bi.entity.DirectDataSetEntity;
import com.itc.bi.entity.DirectoryEntity;
import com.itc.bi.handler.dataSet.PythonCode;
import com.itc.bi.handler.dataSet.SqlCode;
import com.itc.bi.mapper.DirectDataSetMapper;
import com.itc.bi.mapper.DirectoryMapper;
import org.redisson.api.RLock;
import org.redisson.api.RedissonClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * @BelongsProject: itc-airport-cloud
 * @BelongsPackage: com.itc.bi.service.impl
 * @Author: hecaiy
 * @CreateTime: 2024-07-06  14:38
 * @Description: Data-set synchronization jobs (SQL / Python sources) triggered by XXL-Job.
 * @Version: 1.0
 */
@Service
public class DataSetXxlBobService {

    /**
     * JUL logger (stdlib) so failures are no longer silently swallowed.
     * NOTE(review): consider switching to the project's SLF4J facade if available.
     */
    private static final Logger LOGGER = Logger.getLogger(DataSetXxlBobService.class.getName());

    @Autowired
    @Lazy
    private ClickHouseJDBCService clickHouseJDBCService;
    @Autowired
    private DirectoryMapper directoryMapper;
    @Autowired
    private DirectDataSetMapper directDataSetMapper;
    @Autowired
    private SqlCode sqlCode;
    @Autowired
    private PythonCode pythonCode;
    @Autowired
    private KafkaTemplate<String, Object> kafkaTemplate;

    @Autowired
    private StringRedisTemplate stringRedisTemplate;
    @Autowired
    private RedissonClient redissonClient;

    /**
     * Maximum page size used when pulling source rows in batches.
     */
    private static final Integer PAGESIZE = 10000;

    /**
     * Refreshes a SQL-backed data set:
     * 1. truncate the shadow (slave) table;
     * 2. page the source query and bulk-insert every page into the shadow table;
     * 3. atomically swap master/shadow table names and notify downstream via Kafka.
     * <p>
     * Guarded by a per-data-set Redisson lock: if the previous sync is still
     * running, this invocation returns immediately (best-effort semantics).
     *
     * @param dtId data-set / directory id (primary key of both mapper lookups)
     */
    @Async("asyncServiceExecutor")
    public void processSqlData(Long dtId) {
        String key = RedisConstant.DATA_SET_PROCESS_KEY + dtId;
        RLock lock = redissonClient.getLock(key);
        // Previous sync for this data set is still running — skip this round.
        if (!lock.tryLock()) {
            return;
        }
        try {
            DirectoryEntity directoryEntity = directoryMapper.selectById(dtId);
            DirectDataSetEntity dataSetEntity = directDataSetMapper.selectById(dtId);
            if (ObjectUtil.isNull(directoryEntity) || ObjectUtil.isNull(dataSetEntity)) {
                // Previously this fell through to a swallowed NPE; now we log and bail out.
                LOGGER.log(Level.WARNING, "processSqlData: no directory/data-set row for dtId=" + dtId);
                return;
            }
            // Current live table (master) and shadow table (stored in "version").
            String masterTable = directoryEntity.getTableName();
            String slaveTable = directoryEntity.getVersion();
            // 1. Truncate the shadow table before reloading it.
            clickHouseJDBCService.emptyTable(slaveTable);
            // Probe with a 1-row page purely to obtain the total row count.
            IPage<?> page = sqlCode.page(dataSetEntity, 1, 1);
            // 2. Pull the source data page by page and bulk-insert each page.
            long total = page.getTotal();
            // Exact integer ceiling — avoids the double round-trip of Math.ceil.
            long pages = (total + PAGESIZE - 1) / PAGESIZE;
            for (int i = 1; i <= pages; i++) {
                List<Object> rows = sqlCode.previewData(dataSetEntity, i, PAGESIZE);
                if (CollUtil.isNotEmpty(rows)) {
                    clickHouseJDBCService.insertDataByTableNameObject(slaveTable, rows);
                }
            }
            // 3. Swap master/shadow and notify the downstream processing service.
            swapTablesAndNotify(directoryEntity, masterTable, slaveTable, dtId);
        } catch (Exception ex) {
            // Best-effort job: log instead of crashing the async executor,
            // but never swallow silently.
            LOGGER.log(Level.SEVERE, "processSqlData failed for dtId=" + dtId, ex);
        } finally {
            if (lock.isHeldByCurrentThread()) {
                lock.unlock();
            }
        }
    }

    /**
     * Refreshes a Python-backed data set: truncate the shadow table, insert the
     * rows produced by the Python handler, then swap master/shadow and notify
     * downstream via Kafka. Same per-data-set locking semantics as
     * {@link #processSqlData(Long)}.
     *
     * @param dtId data-set / directory id
     */
    @Async("asyncServiceExecutor")
    public void processPythonData(Long dtId) {
        String key = RedisConstant.DATA_SET_PROCESS_KEY + dtId;
        RLock lock = redissonClient.getLock(key);
        // Previous sync for this data set is still running — skip this round.
        if (!lock.tryLock()) {
            return;
        }
        try {
            DirectoryEntity directoryEntity = directoryMapper.selectById(dtId);
            DirectDataSetEntity dataSetEntity = directDataSetMapper.selectById(dtId);
            if (ObjectUtil.isNull(directoryEntity) || ObjectUtil.isNull(dataSetEntity)) {
                LOGGER.log(Level.WARNING, "processPythonData: no directory/data-set row for dtId=" + dtId);
                return;
            }
            String masterTable = directoryEntity.getTableName();
            String slaveTable = directoryEntity.getVersion();
            clickHouseJDBCService.emptyTable(slaveTable);
            List<Object> rows = pythonCode.list(dataSetEntity);
            // Guard against an empty result — mirrors the SQL path.
            if (CollUtil.isNotEmpty(rows)) {
                clickHouseJDBCService.insertDataByTableNameObject(slaveTable, rows);
            }
            swapTablesAndNotify(directoryEntity, masterTable, slaveTable, dtId);
        } catch (Exception ex) {
            LOGGER.log(Level.SEVERE, "processPythonData failed for dtId=" + dtId, ex);
        } finally {
            if (lock.isHeldByCurrentThread()) {
                lock.unlock();
            }
        }
    }

    /**
     * Swaps the master and shadow table names on the directory row, persists the
     * change, and publishes a Kafka message so the downstream processing service
     * starts its own synchronization for this data set.
     *
     * @param directoryEntity directory row to update (mutated in place)
     * @param masterTable     current live table name (becomes the new shadow)
     * @param slaveTable      freshly loaded shadow table name (becomes the new master)
     * @param dtId            data-set id carried in the Kafka payload
     */
    private void swapTablesAndNotify(DirectoryEntity directoryEntity,
                                     String masterTable,
                                     String slaveTable,
                                     Long dtId) {
        directoryEntity.setTableName(slaveTable);
        directoryEntity.setVersion(masterTable);
        directoryMapper.updateById(directoryEntity);
        DataSetDTO dataSetDTO = new DataSetDTO();
        dataSetDTO.setDtId(dtId);
        // Notify the data-processing sync service to start its synchronization.
        kafkaTemplate.send(KafkaConstant.KAFKA_TOPIC_DATA_SET_PROCESS, JSON.toJSONString(dataSetDTO));
    }
}
