package com.itc.bi.kafaka;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.core.toolkit.IdWorker;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.itc.bi.constant.KafkaConstant;
import com.itc.bi.entity.*;
import com.itc.bi.handler.dataSet.DataSet;
import com.itc.bi.handler.dataSet.SqlCode;
import com.itc.bi.kafaka.DTO.DataSetKafkaDTO;
import com.itc.bi.service.*;
import com.itc.bi.service.impl.ClickHouseJDBCService;
import com.itc.common.core.exception.ServiceException;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;

import java.util.Date;
import java.util.List;

/**
 * Kafka listener that provisions SQL-built data sets.
 *
 * <p>On each message it:
 * <ol>
 *   <li>loads the data-set directory entry and derives master/slave table names,</li>
 *   <li>creates both ClickHouse tables from the column definitions,</li>
 *   <li>synchronizes the data into the master table,</li>
 *   <li>optionally registers and starts an xxl-job CRON schedule.</li>
 * </ol>
 *
 * @author hecaiy
 * @since 2024-07-04
 */
@Component // NOTE(review): the class previously carried both @Component and @Service; they are the same stereotype, one registration suffices
@Slf4j
public class DataSetSQLKafkaListener {
    @Autowired
    private DirectoryService directoryService;
    @Autowired
    private ClickHouseJDBCService clickHouseJDBCService;
    @Autowired
    private DataSetColumnService dataSetColumnService;
    @Autowired
    private SqlCode sqlCode;
    @Autowired
    private DirectDataSetService directDataSetService;
    @Autowired
    private XxlJobInfoService xxlJobInfoService;
    @Autowired
    private DataSynrecordService dataSynrecordService;
    @Autowired
    XxlJobGroupService groupService;
    @Autowired
    private SyncDataSetService syncDataSetService;

    /** xxl-job executor app name; used to look up the executor group for new jobs. */
    @Value("${xxl.job.executor.appname}")
    private String jobName;

    /**
     * Page size used when pulling source rows batch-by-batch in {@link #insertData}.
     */
    private static final Integer PAGESIZE = 10000;

    /**
     * Handles a data-set creation message:
     * builds the data set, creates master/slave tables, synchronizes data,
     * and (when a cron expression is present) registers an xxl-job schedule.
     *
     * @param record Kafka record whose value is a JSON-serialized {@link DataSetKafkaDTO}
     */
    @KafkaListener(groupId = "bi_data_set_sql", topics = KafkaConstant.KAFKA_TOPIC_DATA_SET_SQL)
    public void dataSetListener(ConsumerRecord<String, String> record) {
        DataSetKafkaDTO directoryParam = JSON.parseObject(record.value(), DataSetKafkaDTO.class);
        Long dtId = directoryParam.getDtId();
        DirectoryEntity directoryEntity = directoryService.getById(dtId);
        if (ObjectUtil.isEmpty(directoryEntity)) {
            log.error("数据集信息为空");
            return;
        }
        // A non-empty tableName means this data set was already provisioned; drop the duplicate message.
        if (ObjectUtil.isNotEmpty(directoryEntity.getTableName())) {
            log.error("数据集已存在");
            return;
        }
        directoryEntity.setTableName(KafkaConstant.DATA_SET_TYPE_SQL_MASTER + dtId);
        directoryEntity.setVersion(KafkaConstant.DATA_SET_TYPE_SQL_SLAVE + dtId);
        // Column definitions drive the ClickHouse DDL; nothing to create without them.
        List<DataSetColumnEntity> list = dataSetColumnService.list(Wrappers.<DataSetColumnEntity>lambdaQuery()
                .eq(DataSetColumnEntity::getDataSetId, dtId)
                .orderByAsc(DataSetColumnEntity::getColumnPosition));
        if (ObjectUtil.isEmpty(list)) {
            log.error("数据集字段为空");
            return;
        }
        // Create the master table and its slave/version twin with identical schemas.
        clickHouseJDBCService.createTableByColumnList(directoryEntity.getTableName(), list, "");
        clickHouseJDBCService.createTableByColumnList(directoryEntity.getVersion(), list, "");
        DirectDataSetEntity dataSetEntity = directDataSetService.getById(dtId);
        if (ObjectUtil.isEmpty(dataSetEntity)) {
            // Guard: previously a missing direct-data-set row caused an NPE downstream.
            log.error("direct data set not found, dtId={}", dtId);
            return;
        }
        syncDataSetService.insertData(dataSetEntity, directoryEntity.getDtId(), directoryEntity.getTableName(), sqlCode, clickHouseJDBCService);
        this.directoryService.updateById(directoryEntity);
        // No cron expression -> no scheduled refresh; provisioning is complete.
        if (StringUtils.isEmpty(directoryParam.getCron())) {
            return;
        }
        XxlJobGroupEntity groupEntity = groupService.getOne(Wrappers.<XxlJobGroupEntity>lambdaQuery()
                .eq(XxlJobGroupEntity::getAppName, jobName).last("limit 1"));
        if (ObjectUtil.isEmpty(groupEntity)) {
            // Guard: getOne may return null if the executor group is not registered yet;
            // previously this produced an NPE at groupEntity.getId().
            log.error("xxl-job executor group not found, appName={}, skip schedule creation", jobName);
            return;
        }
        XxlJobInfoEntity xxlJobInfoEntity = buildJobInfo(dtId, directoryParam.getCron(), groupEntity.getId(), directoryEntity.getName());
        xxlJobInfoService.saveOrUpdateXxlJob(xxlJobInfoEntity);
        saveSynRecord(dtId, directoryParam.getCron(), directoryEntity, xxlJobInfoEntity);
        // Start the freshly registered schedule.
        xxlJobInfoService.start(xxlJobInfoEntity.getId());
    }

    /**
     * Builds the xxl-job definition for the data set's scheduled SQL refresh.
     *
     * @param dtId    data-set id, passed to the SqlHandler as executor param
     * @param cron    CRON schedule expression
     * @param groupId xxl-job executor group id
     * @param name    data-set display name, used in the job description
     * @return a populated, unsaved job entity
     */
    private XxlJobInfoEntity buildJobInfo(Long dtId, String cron, Integer groupId, String name) {
        XxlJobInfoEntity xxlJobInfoEntity = new XxlJobInfoEntity();
        xxlJobInfoEntity.setJobGroup(groupId);
        xxlJobInfoEntity.setJobDesc("数据集SQL调度任务" + name);
        Date now = new Date(); // single timestamp so add/update times match exactly
        xxlJobInfoEntity.setAddTime(now);
        xxlJobInfoEntity.setUpdateTime(now);
        xxlJobInfoEntity.setAuthor("lowcode_bi");
        xxlJobInfoEntity.setAlarmEmail("waytale@waytale.cn");
        xxlJobInfoEntity.setScheduleType("CRON");
        xxlJobInfoEntity.setScheduleConf(cron);
        xxlJobInfoEntity.setMisfireStrategy("DO_NOTHING");
        xxlJobInfoEntity.setExecutorRouteStrategy("FIRST");
        xxlJobInfoEntity.setExecutorHandler("SqlHandler");
        JSONObject executorParam = new JSONObject();
        executorParam.put("dtId", dtId);
        xxlJobInfoEntity.setExecutorParam(JSON.toJSONString(executorParam));
        xxlJobInfoEntity.setExecutorFailRetryCount(0);
        xxlJobInfoEntity.setGlueType("BEAN");
        xxlJobInfoEntity.setExecutorBlockStrategy("SERIAL_EXECUTION");
        xxlJobInfoEntity.setExecutorTimeout(0);
        return xxlJobInfoEntity;
    }

    /**
     * Records the schedule for the data set. If an earlier schedule exists,
     * its xxl-job task is stopped and removed before the record is upserted
     * pointing at the new task.
     */
    private void saveSynRecord(Long dtId, String cron, DirectoryEntity directoryEntity, XxlJobInfoEntity xxlJobInfoEntity) {
        DataSynrecordEntity entity = dataSynrecordService.getOne(Wrappers.<DataSynrecordEntity>lambdaQuery()
                .eq(DataSynrecordEntity::getDtId, dtId));
        DataSynrecordEntity dataSynrecordEntity = new DataSynrecordEntity();
        // Existing schedule: copy its id (so saveOrUpdate updates in place),
        // then stop and remove the superseded xxl-job task.
        if (ObjectUtil.isNotEmpty(entity)) {
            BeanUtil.copyProperties(entity, dataSynrecordEntity);
            xxlJobInfoService.stop(entity.getTaskId());
            xxlJobInfoService.removeJob(entity.getTaskId());
        }
        dataSynrecordEntity.setDtId(dtId);
        dataSynrecordEntity.setTableId(dtId);
        dataSynrecordEntity.setTaskId(xxlJobInfoEntity.getId());
        // (removed) setId(getId()) self-assignment was a no-op; id is either copied above or generated on save
        dataSynrecordEntity.setDirId(directoryEntity.getPid());
        dataSynrecordEntity.setTargetTableName(directoryEntity.getTableName());
        dataSynrecordEntity.setCron(cron);
        dataSynrecordEntity.setIsExec(1);
        dataSynrecordEntity.setSynType(1);
        dataSynrecordService.saveOrUpdate(dataSynrecordEntity);
    }

    /**
     * Asynchronously reloads the data set's master table: truncates it, then
     * streams source rows page-by-page ({@link #PAGESIZE} rows per batch) into ClickHouse.
     *
     * <p>NOTE(review): @Async only takes effect when this method is invoked through
     * the Spring proxy (i.e. from another bean), not via a same-class call — confirm callers.
     *
     * @param dataSetEntity   source data-set definition used to page rows via {@code sqlCode.previewData}
     * @param directoryEntity directory entry whose {@code tableName} is the target table
     */
    @Async("asyncServiceExecutor")
    public void insertData(DirectDataSetEntity dataSetEntity, DirectoryEntity directoryEntity) {
        clickHouseJDBCService.emptyTable(directoryEntity.getTableName());
        int page = 1;
        while (true) {
            List<Object> rows = sqlCode.previewData(dataSetEntity, page, PAGESIZE);
            if (CollUtil.isEmpty(rows)) {
                break; // no more rows
            }
            clickHouseJDBCService.insertDataByTableNameObject(directoryEntity.getTableName(), rows);
            if (rows.size() < PAGESIZE) {
                break; // short page -> last page
            }
            page++;
        }
    }
}
