package cn.ac.iscas.service.sql;

import cn.ac.iscas.dao.IJobMetadataDao;
import cn.ac.iscas.domain.job.*;
import cn.ac.iscas.domain.job.period.JobExecutePeriod;
import cn.ac.iscas.domain.response.Response;
import com.alibaba.fastjson.JSON;
import com.iscas.datasong.lib.common.DataSongException;
import com.iscas.datasong.lib.common.Status;
import com.iscas.datasong.lib.util.DataSongJsonUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.List;
import java.util.Map;
import java.util.UUID;

@Service
public class SqlService {

    /** Fixed delay (ms) before the first run of a periodic job. */
    private static final int PERIOD_START_DELAY_MS = 2000;

    @Autowired
    JobExecutorFactory jobExecutorFactory;

    @Autowired
    IJobMetadataDao jobMetadataDao;

    /**
     * Loads the jobs identified by {@code jobIds} and submits each one for execution
     * via {@link #executeSql(String)} using the job's stored run info as the payload.
     *
     * @param jobIds ids of the jobs to submit; a null/empty list is a no-op
     * @return {@code true} once every job has been submitted
     * @throws DataSongException if a loaded job carries no run info
     */
    public boolean executeBatchTasks(List<String> jobIds) throws DataSongException {
        if (jobIds == null || jobIds.isEmpty()) {
            return true;
        }
        List<Job> jobs = jobMetadataDao.selectByIds(jobIds);
        // TODO: check job status before submitting (reject jobs that are already running).

        for (Job job : jobs) {
            Object runInfo = job.getRunInfo();
            if (runInfo == null) {
                // Guard the former runInfo.toString() NPE: a job without run info cannot run.
                throw new DataSongException(Status.PARAM_ERROR, "the job has no run info, can not run!");
            }
            executeSql(runInfo.toString());
        }
        return true;
    }

    /**
     * Parses a job description (JSON string from the frontend), resolves the matching
     * executor by its {@code jobType} field, registers the generated job id in the
     * global type/period maps, and delegates execution to the executor.
     *
     * @param data JSON object expected to contain at least "jobType" and "name";
     *             optional "cycleTime" + "cycleUnit" ("minute"/"hour"/"day") make the job periodic
     * @return the executor's response, or a PARAM_ERROR response for empty/invalid input
     */
    public Response executeSql(String data) {
        Response response = new Response();
        if (data == null || data.trim().isEmpty()) {
            response.setData(null);
            response.setInfo("Job content is empty!");
            response.setStatus(Status.PARAM_ERROR.getValue());
            return response;
        }
        // Parse the frontend payload.
        Map<String, Object> dataMap = JSON.parseObject(data);
        if (dataMap == null || dataMap.isEmpty()) {
            response.setData(null);
            response.setInfo("Job content is empty!");
            response.setStatus(Status.PARAM_ERROR.getValue());
            return response;
        }

        // Resolve the executor type (e.g. Flink / DaMeng) from "jobType". A missing or
        // unknown value is a caller error, not a server crash, so report PARAM_ERROR
        // instead of letting Enum.valueOf throw IllegalArgumentException.
        JobType jobType;
        try {
            jobType = JobType.valueOf(dataMap.getOrDefault("jobType", "").toString().trim().toUpperCase());
        } catch (IllegalArgumentException e) {
            response.setData(null);
            response.setInfo("Unknown or missing jobType: " + dataMap.get("jobType"));
            response.setStatus(Status.PARAM_ERROR.getValue());
            return response;
        }
        // Polymorphic dispatch through the factory.
        JobExecutorInterface executor = jobExecutorFactory.createExecutor(jobType);

        // Build a unique job id from the job name plus a short random suffix.
        String jobId = dataMap.getOrDefault("name", "").toString()
                + "-" + UUID.randomUUID().toString().replace("-", "").substring(0, 7);
        GlobeJobTypeMap.put(jobId, jobType);

        Object cycleTime = dataMap.get("cycleTime");
        if (cycleTime != null && !cycleTime.toString().trim().isEmpty()) {
            long periodTime;
            try {
                // fastjson may deserialize the value as Integer, Long, BigDecimal, or a
                // numeric String depending on the payload; the former `(int)` cast threw
                // ClassCastException for anything but Integer.
                periodTime = (cycleTime instanceof Number)
                        ? ((Number) cycleTime).longValue()
                        : Long.parseLong(cycleTime.toString().trim());
            } catch (NumberFormatException e) {
                response.setData(null);
                response.setInfo("Invalid cycleTime: " + cycleTime);
                response.setStatus(Status.PARAM_ERROR.getValue());
                return response;
            }
            // Convert the cycle to milliseconds; an unknown/absent unit keeps the raw
            // value (treated as milliseconds), matching the original behavior.
            String periodTimeUnit = dataMap.getOrDefault("cycleUnit", "").toString();
            switch (periodTimeUnit) {
                case "minute":
                    periodTime *= 60L * 1000;
                    break;
                case "hour":
                    periodTime *= 60L * 60 * 1000;
                    break;
                case "day":
                    periodTime *= 24L * 60 * 60 * 1000;
                    break;
                default:
                    break;
            }
            JobExecutePeriod jobExecutePeriod = new JobExecutePeriod();
            jobExecutePeriod.setPeriod(periodTime);
            jobExecutePeriod.setDelay(PERIOD_START_DELAY_MS);
            GlobeJobPeriodMap.put(jobId, jobExecutePeriod);
        }

        dataMap.put("jobId", jobId);
        return executor.executeSql(dataMap);
    }
}
