package cn.ac.iscas.service.sql;

import cn.ac.iscas.base.utils.ParameterUtils;
import cn.ac.iscas.base.utils.SpringUtil;
import cn.ac.iscas.dao.IJobMetadataDao;
import cn.ac.iscas.domain.NodeTask;
import cn.ac.iscas.domain.job.Job;
import cn.ac.iscas.domain.job.JobExecutorInterface;
import cn.ac.iscas.domain.response.Response;
import cn.ac.iscas.service.queue.SqlExecuteQueueService;
import cn.ac.iscas.service.task.TaskScheduler;
import cn.ac.iscas.service.task.flink.FlinkSqlTask;
import cn.ac.iscas.service.task.impl.SimpleTaskListener;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.iscas.datasong.lib.common.Status;
import com.iscas.datasong.lib.util.DataSongJsonUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.*;

/**
 * Translates a front-end drag-and-drop job description into a DAG of Flink SQL
 * tasks, schedules the DAG for asynchronous execution, and records job metadata
 * on first submission.
 *
 * @author LJian
 * @version 1.0
 * @date 2022/5/26 8:50
 */
@Service
public class FlinkSqlService implements JobExecutorInterface {
    @Autowired
    private TaskScheduler taskScheduler;

    @Autowired
    private IJobMetadataDao jobMetadataDao;

    /**
     * Entry point required by {@link JobExecutorInterface}; delegates to
     * {@link #genDag(Map)}.
     *
     * @param dataMap raw request payload from the front end
     * @return scheduling result (job id in {@code data}, status + info message)
     */
    @Override
    public Response executeSql(Map dataMap) {
        return genDag(dataMap);
    }

    /**
     * Builds the task-execution DAG described by {@code dataMap} and starts it.
     * <p>
     * Expected payload shape (assumed from usage — confirm against the caller):
     * {@code content} is a list of component maps (each with {@code id},
     * {@code type}, {@code kind}, {@code parameter.param}, {@code front}),
     * {@code jobId} is optional, {@code name} is optional.
     *
     * @param dataMap raw request payload from the front end
     * @return response whose {@code data} is the job id; status is OK when the
     *         DAG was scheduled, PARAM_ERROR when scheduling threw
     */
    public Response genDag(Map dataMap) {
        Response response = new Response();

        // Parse the components (nodes dragged in by the front end). Default to an
        // empty list: the previous default of "" caused a ClassCastException
        // whenever "content" was missing from the payload.
        List<Map<String, Object>> components =
                (List<Map<String, Object>>) dataMap.getOrDefault("content", new ArrayList<>());

        // Resolve the job id; a null mapping must not NPE (getOrDefault returns
        // the mapped null when the key is present).
        String jobId = Objects.toString(dataMap.get("jobId"), "");
        // If the front end did not supply a jobId, generate a short random one.
        // Prefix "c" keeps it a valid SQL identifier (cannot start with a digit).
        if (jobId.isEmpty()) {
            jobId = "c" + UUID.randomUUID().toString().replace("-", "").substring(0, 7);
        }

        Map<String, NodeTask> nodeTaskMap = Maps.newConcurrentMap();

        // Create a per-job Flink catalog and database (both named after the job)
        // so tables registered by this job are isolated from other jobs.
        // NOTE(review): jobId is interpolated into DDL; when client-supplied it
        // should be validated as a plain identifier to rule out SQL injection.
        String createCatalogSql =
                String.format("CREATE CATALOG %s WITH ( 'type'='generic_in_memory' )", jobId);
        String createDatabaseSql = String.format("CREATE DATABASE %s.%s", jobId, jobId);

        // Seed the job's SQL execution queue with the catalog/database DDL so it
        // runs before any component-generated statements.
        SqlExecuteQueueService sqlExecuteQueueService = SpringUtil.getBean(SqlExecuteQueueService.class);
        sqlExecuteQueueService.add(jobId, createCatalogSql);
        sqlExecuteQueueService.add(jobId, createDatabaseSql);

        // Convert each component into a NodeTask and collect them into the DAG map.
        for (Map<String, Object> component : components) {
            String taskId = (String) component.getOrDefault("id", "");
            String type = ParameterUtils.getRequiredString(component, "type");
            String kind = ParameterUtils.getRequiredString(component, "kind");

            // The effective task parameters live under component.parameter.param.
            Map<String, Object> parameter = (Map<String, Object>)
                    ((Map<String, Object>) component.getOrDefault("parameter", new HashMap<>()))
                            .getOrDefault("param", new HashMap<>());

            // "front" lists ids of upstream components this task depends on;
            // FlinkSqlTask receives null (not an empty set) when there are none,
            // matching the original contract.
            List<String> fronts = (List<String>) component.getOrDefault("front", new ArrayList<>());
            Set<String> dependencies = fronts.isEmpty() ? null : Sets.newHashSet(fronts);

            NodeTask nodeTask = new FlinkSqlTask(type, kind, parameter, jobId, taskId, dependencies);
            nodeTaskMap.put(nodeTask.getTaskId(), nodeTask);
        }

        try {
            // Start executing the DAG; tasks run asynchronously on Flink.
            taskScheduler.startNodeTasks(jobId, nodeTaskMap, new SimpleTaskListener());
        } catch (RuntimeException e) {
            // TODO: replace with an SLF4J logger once one is wired into this class.
            e.printStackTrace();
            response.setStatus(Status.PARAM_ERROR.getValue());
            response.setData(jobId);
            response.setInfo("调度失败，" + e.getMessage());
            return response;
        }

        response.setStatus(Status.OK.getValue());
        response.setData(jobId);
        response.setInfo("调度成功，等待Flink任务执行！");

        // First submission of this job id: persist its metadata.
        if (jobMetadataDao.getJob(jobId) == null) {
            Job job = new Job();
            job.set_id(jobId);
            // Null-safe: a present-but-null "name" must not NPE.
            job.setName(Objects.toString(dataMap.get("name"), ""));
            job.setContent(DataSongJsonUtils.toJson(components));
            job.setCron("");
            job.setCreateTime(Date.from(ZonedDateTime.now(ZoneId.of("Asia/Shanghai")).toInstant()));
            job.setExecuteCount(0);
            jobMetadataDao.save(job);
        }

        // Return immediately; the Flink tasks keep running in the background.
        return response;
    }
}
