package com.dcloud.job;

import com.dcloud.cfg.GlobalConfig;
import com.dcloud.core.client.IClient;
import com.dcloud.core.source.RdbmsSource;
import com.dcloud.dbdrive.IClientFactory;
import com.dcloud.enums.TaskEnum;
import com.dcloud.manage.service.entity.CollectTask;
import com.dcloud.manage.service.entity.CollectTaskSql;
import com.dcloud.manage.service.entity.DBInfoEntity;
import com.dcloud.manage.service.entity.DBParamsEntity;
import com.dcloud.manage.service.ICollectSourceService;
import com.dcloud.manage.service.ICollectTaskService;
import com.dcloud.manage.service.ICollectTaskSqlService;
import com.dcloud.transfer.MqTransferObj;
import com.dcloud.utils.ClassUtils;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.messaging.Message;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.stereotype.Component;

import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;
import java.util.List;
import java.util.Map;

import static java.util.stream.Collectors.groupingBy;

@Component
@Slf4j
public class JobExector {

    @Autowired
    ICollectTaskService iTaskService;

    @Autowired
    ICollectTaskSqlService iSqlService;

    @Autowired
    ICollectSourceService iSourceService;

    @Autowired
    IClientFactory iClientFactory;

    @Autowired
    ObjectMapper objectMapper;

    /**
     * Scans for collect tasks in RUNNING state and executes each SYNC_SELECT task:
     * runs the task's SQL against its configured data source and forwards every
     * result row to the message queue (paginated when the row count exceeds the
     * MAX_TASK_QUERY global setting).
     * <p>
     * Scheduling is currently disabled; re-enable via the {@code @Scheduled} annotation.
     *
     * @throws ClassNotFoundException if the source class configured for the data source cannot be loaded
     * @throws InstantiationException if the source class cannot be instantiated
     * @throws IllegalAccessException if the source class constructor or a field is inaccessible
     */
    //@Scheduled(cron="0/1 * * * * ?")
    public void scanTaskAndExecute() throws ClassNotFoundException, InstantiationException, IllegalAccessException {
        // Snapshot "now" once so every task in this sweep sees the same cutoff time.
        Date currentDate = Date.from(LocalDateTime.now().atZone(ZoneId.systemDefault()).toInstant());

        List<CollectTask> taskList = iTaskService.queryTaskByState(TaskEnum.TASK_STATE_RUNNING.getContent(), currentDate);
        for (CollectTask task : taskList) {
            // Only SQL-select style collection is handled by this job.
            if (!StringUtils.equals(task.getCollectType(), TaskEnum.TASK_SYNC_SELECT.getContent())) {
                continue;
            }
            CollectTaskSql sqlEntity = iSqlService.queryTaskSqlByTaskId(task.getTid());
            if (sqlEntity == null) {
                continue; // task has no SQL configured — nothing to execute
            }
            executeSelectTask(sqlEntity);
        }
    }

    /**
     * Executes one SYNC_SELECT task: builds the reflective source object, runs the
     * task SQL (counting first, then either one query or LIMIT/OFFSET pages), and
     * sends each result row to MQ.
     */
    private void executeSelectTask(CollectTaskSql sqlEntity)
            throws ClassNotFoundException, InstantiationException, IllegalAccessException {
        DBInfoEntity dbInfoEntity = iSourceService.queryDbInfoBySourceTid(sqlEntity.getSourceId());
        RdbmsSource defineSource = buildSource(dbInfoEntity);

        IClient client = iClientFactory.getDbdriveByCode(dbInfoEntity.getSourceCode());

        // Upper bound on rows fetched per query, from global configuration.
        long maxTaskQuery = Long.parseLong(GlobalConfig.oneCfg("MAX_TASK_QUERY"));

        // Probe the row count first so huge result sets are fetched page by page.
        long count = client.execQueryCount(defineSource, sqlEntity.getTaskSql());
        if (count < maxTaskQuery) {
            List<?> rows = client.execQuery(defineSource, sqlEntity.getTaskSql());
            rows.forEach(row -> sendContentToMq(sqlEntity, row));
        } else {
            long total = 0;
            // FIX: original condition was `offset + 1 < count`, which silently dropped
            // the final row whenever the last page contained exactly one row.
            for (long offset = 0; offset < count; offset += maxTaskQuery) {
                List<?> page = client.execQueryWithLimit(defineSource, sqlEntity.getTaskSql(), offset, maxTaskQuery);
                // FIX: the send call was commented out in this branch while the
                // small-result branch sent normally; paginated rows are now forwarded too.
                page.forEach(row -> sendContentToMq(sqlEntity, row));
                log.info("数据量：{}", page.size());
                total += page.size();
            }
            log.info("总数据量：{}", total);
        }
    }

    /**
     * Instantiates the configured {@link RdbmsSource} subclass and populates its
     * fields from the data-source parameter list via reflection. When a parameter
     * code appears more than once, the first value wins (matching original behavior).
     */
    private RdbmsSource buildSource(DBInfoEntity dbInfoEntity)
            throws ClassNotFoundException, InstantiationException, IllegalAccessException {
        Map<String, List<DBParamsEntity>> paramsByCode =
                dbInfoEntity.getList().stream().collect(groupingBy(DBParamsEntity::getParamCode));

        Class<?> sourceClass = Class.forName(dbInfoEntity.getClassLoad());
        RdbmsSource defineSource;
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance(), which propagated constructor exceptions unchecked.
            defineSource = (RdbmsSource) sourceClass.getDeclaredConstructor().newInstance();
        } catch (NoSuchMethodException | InvocationTargetException e) {
            throw new IllegalStateException("Cannot instantiate source class " + dbInfoEntity.getClassLoad(), e);
        }

        // Copy each matching parameter value into the source object's field.
        for (Field field : ClassUtils.getAllFields(sourceClass)) {
            List<DBParamsEntity> params = paramsByCode.get(field.getName());
            if (params != null && !params.isEmpty()) {
                field.setAccessible(true);
                field.set(defineSource, params.get(0).getParamValue());
            }
        }
        return defineSource;
    }

    /**
     * Serializes one result row into a {@link MqTransferObj} JSON payload destined
     * for the message queue.
     * <p>
     * NOTE(review): no MQ producer is currently wired in — the payload is built but
     * never sent (the Kafka/RocketMQ integrations were commented out). TODO: hook up
     * the producer, e.g. topic "sync.select.default" keyed by "task_" + taskTid.
     *
     * @param sqlEntity the task SQL metadata (supplies the source id for routing)
     * @param e         one row of query output (typically a column→value map)
     */
    private void sendContentToMq(CollectTaskSql sqlEntity, Object e) {
        try {
            String fromSource = "source_" + sqlEntity.getSourceId();
            MqTransferObj mqTransferObj = MqTransferObj.builder().fromSource(fromSource).content(e).build();
            String msg = objectMapper.writeValueAsString(mqTransferObj);
            Message<String> message = MessageBuilder.withPayload(msg).build();
            // TODO: send `message` via the configured MQ template once one is injected.
        } catch (JsonProcessingException ex) {
            // Serialization failure is a programming/configuration error — surface it.
            throw new RuntimeException(ex);
        }
    }
}
