package com.flink.streaming.web.service.impl;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.flink.streaming.web.config.AzKaBanConfig;
import com.flink.streaming.web.enums.BasePyTemplateVariableEnum;
import com.flink.streaming.web.enums.YN;
import com.flink.streaming.web.exceptions.BizException;
import com.flink.streaming.web.mapper.AzkanbanOfflineSQLLogMapper;
import com.flink.streaming.web.mapper.AzkanbanOfflineSQLMapper;
import com.flink.streaming.web.model.entity.AzkanbanOfflineSQL;
import com.flink.streaming.web.model.entity.AzkanbanOfflineSQLLog;
import com.flink.streaming.web.model.param.AzKaBanParam;
import com.flink.streaming.web.model.param.AzKaBanRunParam;
import com.flink.streaming.web.model.vo.AzkanbanOfflineSQLVO;
import com.flink.streaming.web.service.AzKaBanSqlService;
import com.flink.streaming.web.util.AzKaBanUtil;
import com.flink.streaming.web.util.PyFileCreateUtil;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageHelper;
import freemarker.template.TemplateException;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.FileSystemUtils;
import org.springframework.util.StringUtils;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;

@Slf4j
@Service
public class AzKaBanSqlServiceImpl implements AzKaBanSqlService {

    // NOTE(review): public, non-final fields with constant-style names; should be
    // private lowerCamelCase — kept as-is since sibling methods reference them.
    // Target directory where generated Azkaban project files are written.
    @Value("${azkaban.template.target.path}")
    public String BASE_CLASS_PATH;
    // Resource directory holding the shared base files copied into each project.
    @Value("${azkaban.template.resource.path}")
    public String BASE_RESOURCE;

    // Azkaban endpoint configuration passed to every AzKaBanUtil call.
    @Autowired
    private AzKaBanConfig azKaBanConfig;

    // Mapper for the offline-SQL definition table.
    @Autowired
    private AzkanbanOfflineSQLMapper azkanbanOfflineSQLMapper;

    // Mapper for the offline-SQL batch-log (snapshot) table.
    @Autowired
    private AzkanbanOfflineSQLLogMapper azkanbanOfflineSQLLogMapper;


    /**
     * Registers a brand-new offline-SQL job: persists it locally, regenerates the
     * whole project archive and publishes it to Azkaban.
     *
     * @param offlineSQL the job definition to submit
     * @param userName   operator recorded in the batch log
     * @throws Exception if login, file generation or any Azkaban call fails
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void submitSql(AzkanbanOfflineSQL offlineSQL, String userName) throws Exception {
        String projectName = offlineSQL.getProjectName();

        // Authenticate first; every remote call below needs the session.
        String sessionID = AzKaBanUtil.login(azKaBanConfig);

        // Resolve the Azkaban project id and attach it to the entity.
        doSynchronizeProjectID(offlineSQL, sessionID);

        // Load the jobs already registered under this project.
        List<AzkanbanOfflineSQL> history = doQueryOfflineSQLLList(projectName);

        // Persist the new SQL row plus a batch-log snapshot.
        doInsertOfflineSQL(offlineSQL, userName, history);

        // Regenerate every py/flow file for the project and zip them up.
        File zipFile = doCreateZip(history, projectName);

        // Upload the zip, (re)apply schedules, then remove the local zip.
        doUpLoadAndRun(zipFile, offlineSQL, projectName, sessionID);
    }

    /** Looks up the Azkaban project id for the record's project and stores it on the entity. */
    private void doSynchronizeProjectID(AzkanbanOfflineSQL offlineSQL, String sessionID) {
        String remoteId =
                AzKaBanUtil.fetchProjectId(offlineSQL.getProjectName(), sessionID, azKaBanConfig);
        offlineSQL.setProjectId(Long.valueOf(remoteId));
    }


    /**
     * Pages through offline-SQL records matching the given filter.
     *
     * @param azKaBanParam filter and paging options; defaults are used when null
     * @return one page of matching records
     */
    @Override
    public Page<AzkanbanOfflineSQLVO> listPage(AzKaBanParam azKaBanParam) {
        AzKaBanParam query = (azKaBanParam == null) ? new AzKaBanParam() : azKaBanParam;

        PageHelper.startPage(query.getPageNum(), query.getPageSize(), YN.Y.getCode());

        return azkanbanOfflineSQLMapper.selectByParam(query);
    }


    /**
     * Fetches a single offline-SQL record by primary key.
     *
     * @param id primary key
     * @return the matching record, or null when no row exists (callers check with Objects.isNull)
     */
    @Override
    public AzkanbanOfflineSQLVO queryById(Long id) {
        return azkanbanOfflineSQLMapper.queryById(id);
    }

    /**
     * Updates an existing offline-SQL job and republishes the whole project to Azkaban.
     *
     * <p>Fix: this method performs the same DB-write-plus-upload sequence as
     * {@link #submitSql} but was missing {@code @Transactional}, so a failed
     * upload left the updated row committed. It now rolls back like its sibling.
     *
     * @param offlineSQL the edited job definition (matched to the old row by id)
     * @param userName   operator recorded in the batch log
     * @throws Exception if login, file generation or any Azkaban call fails
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void editSql(AzkanbanOfflineSQL offlineSQL, String userName) throws Exception {
        String projectName = offlineSQL.getProjectName();

        // Authenticate first; the upload below needs the session.
        String sessionID = AzKaBanUtil.login(azKaBanConfig);

        // Load the jobs already registered under this project.
        List<AzkanbanOfflineSQL> history = doQueryOfflineSQLLList(projectName);

        // Replace the old row in history with the edited one and snapshot the batch.
        doUpdateOfflineSQL(offlineSQL, userName, history);

        // Regenerate every py/flow file for the project and zip them up.
        File zipFile = doCreateZip(history, projectName);

        // Upload the zip, (re)apply schedules, then remove the local zip.
        doUpLoadAndRun(zipFile, offlineSQL, projectName, sessionID);
    }

    /**
     * Clears the stored per-cycle status of the record identified by {@code id}
     * and re-applies its flow scheduling (an empty status makes
     * {@link #unscheduleAFlow} treat every cycle as "unschedule").
     *
     * <p>Bug fixed: the original branched on {@code sqlvo.getStatus().equals("1")}
     * but both branches executed byte-identical statements, and the comparison
     * threw a NullPointerException when status was null. The duplicate branch is
     * collapsed into a single path and the null-hostile comparison removed.
     *
     * @param id       primary key of the offline-SQL record
     * @param userName operator name (unused here; kept for interface compatibility)
     * @throws BizException if no record exists for {@code id}
     * @throws Exception    if login or the Azkaban schedule calls fail
     */
    @Override
    public void updateStatus(Long id, String userName) throws Exception {
        AzkanbanOfflineSQLVO sqlvo = azkanbanOfflineSQLMapper.queryById(id);

        if (Objects.isNull(sqlvo)) {
            throw new BizException("参数错误");
        }

        // Authenticate and resolve the Azkaban project id for the schedule calls.
        String sessionID = AzKaBanUtil.login(azKaBanConfig);
        String projectId = AzKaBanUtil.fetchProjectId(sqlvo.getProjectName(), sessionID, azKaBanConfig);

        AzkanbanOfflineSQL sql = new AzkanbanOfflineSQL();
        BeanUtils.copyProperties(sqlvo, sql);
        // Empty status => every cycle is unscheduled inside unscheduleAFlow.
        sql.setStatus("");
        sql.setUpdatedTime(System.currentTimeMillis());
        unscheduleAFlow(sql, sql.getProjectName(), sessionID, projectId);
        azkanbanOfflineSQLMapper.update(sql);
    }

    /**
     * Launches an immediate Azkaban execution for each requested indicator cycle.
     *
     * <p>Fix: the original dereferenced the {@code queryById} result without a
     * null check and NPE'd inside {@code doGetFlowName} for an unknown id; it now
     * fails fast with a {@link BizException}, consistent with {@code updateStatus}.
     *
     * @param param holds the record id, comma-separated cycles and runtime overrides
     * @return the Azkaban execution ids, one per launched flow
     * @throws BizException if no record exists for {@code param.getId()}
     * @throws Exception    if login or the Azkaban execute calls fail
     */
    @Override
    public List<String> runSql(AzKaBanRunParam param) throws Exception {
        AzkanbanOfflineSQLVO sqlvo = azkanbanOfflineSQLMapper.queryById(param.getId());
        if (Objects.isNull(sqlvo)) {
            throw new BizException("参数错误");
        }

        String[] flowNames = doGetFlowName(sqlvo, param);
        Map<String, String> extParas = doGetExtMap(param);
        String sessionId = AzKaBanUtil.login(azKaBanConfig);

        List<String> execids = new ArrayList<>(flowNames.length);
        for (String flowName : flowNames) {
            execids.add(AzKaBanUtil.executeFlow(
                    sqlvo.getProjectName(), flowName, sessionId, azKaBanConfig, extParas));
        }
        return execids;
    }

    /**
     * Fetches a slice of the execution log for each entry of {@code dataJson}.
     * Each element must carry {@code execId}, {@code offset} and {@code length}.
     *
     * @param dataJson array of log-request descriptors
     * @return map keyed by execId holding the raw Azkaban log response
     * @throws Exception if login or a log fetch fails
     */
    @Override
    public Map<String, JSONObject> queryFlowLog(JSONArray dataJson) throws Exception {
        String sessionId = AzKaBanUtil.login(azKaBanConfig);

        Map<String, JSONObject> logsByExecId = new HashMap<>(dataJson.size());
        for (int idx = 0; idx < dataJson.size(); idx++) {
            JSONObject request = dataJson.getJSONObject(idx);
            String execId = request.getString("execId");

            // Responses are returned as-is; no per-entry failure filtering here.
            JSONObject logSlice = AzKaBanUtil.fetchExecJobLogs(
                    request.getLong("offset"), request.getInteger("length"),
                    execId, sessionId, azKaBanConfig);

            logsByExecId.put(execId, logSlice);
        }

        return logsByExecId;
    }

    /** Builds one flow name per requested cycle: {@code <indicatorCode>_<cycle>}. */
    private String[] doGetFlowName(AzkanbanOfflineSQLVO sqlVO, AzKaBanRunParam param) {
        String prefix = sqlVO.getIndicatorCode() + "_";
        String[] cycles = param.getIndicatorCycle().split(",");

        String[] flowNames = new String[cycles.length];
        int pos = 0;
        for (String cycle : cycles) {
            flowNames[pos++] = prefix + cycle;
        }
        return flowNames;
    }

    /** Collects the runtime override parameters forwarded to Azkaban on execution. */
    private Map<String, String> doGetExtMap(AzKaBanRunParam param) {
        Map<String, String> overrides = new HashMap<>(3);
        overrides.put("startTime", param.getStartTime());
        overrides.put("endTime", param.getEndTime());
        overrides.put("extParas", param.getExtParas());
        return overrides;
    }

    /**
     * Replaces the matching entry in {@code history} with the edited record,
     * persists the update and snapshots the whole batch into the log table.
     *
     * @param offlineSQL edited record (matched against history by id)
     * @param userName   operator recorded in the batch log
     * @param history    all records of the project; mutated in place
     */
    private void doUpdateOfflineSQL(AzkanbanOfflineSQL offlineSQL, String userName, List<AzkanbanOfflineSQL> history) {

        // Swap out the previous version of this record, carrying over its project id.
        history.stream()
                .filter(existing -> existing.getId().equals(offlineSQL.getId()))
                .findFirst()
                .ifPresent(existing -> {
                    history.remove(existing);
                    offlineSQL.setProjectId(existing.getProjectId());
                });

        history.add(offlineSQL);
        offlineSQL.setDeleted(0);
        // NOTE(review): createdTime is reset on every edit — confirm this is intended.
        offlineSQL.setCreatedTime(System.currentTimeMillis());
        offlineSQL.setUpdatedTime(offlineSQL.getCreatedTime());
        azkanbanOfflineSQLMapper.update(offlineSQL);

        // Snapshot the resulting batch into the log table.
        doInsertBatchLog(offlineSQL, userName, history);

    }

    /**
     * Writes one batch-log row per record in {@code history}, all sharing a
     * millisecond-timestamp batch number.
     *
     * <p>Bugs fixed: the original copied the single {@code offlineSQL} argument
     * into every log row instead of the history element being iterated, so the
     * whole batch logged the same record; and
     * {@code sqlLog.setUpdatedTime(sqlLog.getUpdatedTime())} was a no-op that
     * left updatedTime unset — it now mirrors createdTime.
     *
     * @param offlineSQL the record that triggered this batch (kept for signature
     *                   compatibility; rows are copied from {@code history})
     * @param userName   operator stamped on every row
     * @param history    records to snapshot
     */
    private void doInsertBatchLog(AzkanbanOfflineSQL offlineSQL, String userName, List<AzkanbanOfflineSQL> history) {
        String batchNo = String.valueOf(System.currentTimeMillis());

        List<AzkanbanOfflineSQLLog> offlineSQLLogs = new ArrayList<>(history.size());
        history.forEach(record -> {
            AzkanbanOfflineSQLLog sqlLog = new AzkanbanOfflineSQLLog();
            // Copy the iterated history record, not the method argument.
            BeanUtils.copyProperties(record, sqlLog);
            sqlLog.setId(null);
            sqlLog.setBatchNo(batchNo);
            sqlLog.setCreatedUserName(userName);
            sqlLog.setUpdatedUserName(userName);
            sqlLog.setCreatedTime(System.currentTimeMillis());
            sqlLog.setUpdatedTime(sqlLog.getCreatedTime());
            offlineSQLLogs.add(sqlLog);
        });

        azkanbanOfflineSQLLogMapper.insertList(offlineSQLLogs);
    }

    /**
     * Uploads the project zip to Azkaban, re-applies the flow schedules, and
     * always removes the local zip afterwards.
     *
     * <p>Fix: the result of {@link File#delete()} was silently ignored; a failed
     * cleanup is now logged so leftover temp files are visible.
     *
     * @param zipFile     local archive to upload; deleted in all cases
     * @param offlineSQL  record whose cycle/cron pairs drive the scheduling
     * @param projectName Azkaban project name
     * @param sessionID   authenticated Azkaban session
     * @throws Exception if the upload or a schedule call fails
     */
    private void doUpLoadAndRun(File zipFile, AzkanbanOfflineSQL offlineSQL, String projectName, String sessionID) throws Exception {
        try {
            // Upload the archive; Azkaban returns the project id.
            String projectId = AzKaBanUtil.uploadZip(projectName, zipFile, azKaBanConfig, sessionID);

            unscheduleAFlow(offlineSQL, projectName, sessionID, projectId);
        } finally {
            if (!zipFile.delete()) {
                log.warn("failed to delete temp zip file: {}", zipFile.getAbsolutePath());
            }
        }
    }

    /**
     * Walks every {@code cycle=cron} pair on the record and, per cycle, either
     * cancels the flow's schedule (status missing or "1") or (re)applies its cron.
     *
     * @param offlineSQL  record carrying status and cycle/cron strings
     * @param projectName Azkaban project name
     * @param sessionID   authenticated Azkaban session
     * @param projectId   Azkaban project id used for schedule lookups
     */
    private void unscheduleAFlow(AzkanbanOfflineSQL offlineSQL, String projectName, String sessionID, String projectId) {

        Map<String, String> statusByCycle = stringToMap(offlineSQL.getStatus());

        for (String pair : offlineSQL.getIndicatorCycleCorn().split(",")) {
            String[] cycleAndCron = pair.split("=");
            String cycle = cycleAndCron[0];
            String flowName = offlineSQL.getIndicatorCode() + "_" + cycle;

            String status = statusByCycle.get(cycle);
            if (status == null || "1".equals(status)) {
                // No status / status "1": make sure the flow is not scheduled.
                doUnscheduleAFlow(projectId, azKaBanConfig, sessionID, flowName);
            } else {
                // Otherwise (re)apply the cron schedule for this cycle.
                AzKaBanUtil.scheduleByCronEXEaFlow(projectName, sessionID, cycleAndCron[1], flowName, azKaBanConfig);
            }
        }
    }

    /**
     * Parses a status string of the form {@code "cycle1=status1,cycle2=status2"}
     * into a cycle-to-status map.
     *
     * <p>Hardened: entries without an {@code =value} part are now skipped instead
     * of throwing {@link ArrayIndexOutOfBoundsException}; the braceless single-line
     * {@code if} was also braced. The Spring {@code StringUtils.isEmpty} call was
     * replaced by the equivalent plain null/empty check.
     *
     * @param statusStr raw status string; may be null or empty
     * @return mutable map of cycle to status; empty when input is blank
     */
    private Map<String, String> stringToMap(String statusStr) {
        if (statusStr == null || statusStr.isEmpty()) {
            return new HashMap<>(1);
        }
        String[] entries = statusStr.split(",");
        Map<String, String> statusByCycle = new HashMap<>(entries.length);

        for (String entry : entries) {
            String[] keyValue = entry.split("=");
            if (keyValue.length >= 2) {
                statusByCycle.put(keyValue[0], keyValue[1]);
            }
        }

        return statusByCycle;
    }


    /** Cancels the active schedule of {@code flowId}, if one is registered. */
    private void doUnscheduleAFlow(String projectId, AzKaBanConfig azKaBanConfig, String sessionID, String flowId) {
        // Look up the schedule id; an empty result means nothing to cancel.
        String scheduleId = AzKaBanUtil.fetchASchedule(projectId, sessionID, flowId, azKaBanConfig);
        if (!StringUtils.isEmpty(scheduleId)) {
            AzKaBanUtil.unscheduleAFlow(scheduleId, sessionID, azKaBanConfig);
        }
    }

    /**
     * Regenerates the python/flow files for every record of the project, zips the
     * working directory and returns the zip. The working directory is always
     * removed; the caller deletes the returned zip when done.
     *
     * <p>Fixes: the zip {@link FileOutputStream} was never closed (resource leak)
     * — now try-with-resources; three identical catch blocks that all called
     * {@code printStackTrace()} were collapsed into one that logs via SLF4J. The
     * per-record rendering was extracted into {@link #doRenderProjectFiles}.
     *
     * @param history     all records of the project to render
     * @param projectName Azkaban project name; also the working-directory name
     * @return the created zip file
     * @throws IOException if creating or writing the zip fails
     */
    private File doCreateZip(List<AzkanbanOfflineSQL> history, String projectName) throws IOException {
        String path = BASE_CLASS_PATH + projectName;
        try {
            for (AzkanbanOfflineSQL offlineSQL : history) {
                try {
                    doRenderProjectFiles(offlineSQL, projectName, path);
                } catch (Exception e) {
                    // Best-effort per record, as before; failures no longer go to stderr.
                    log.error("failed to generate files for indicator {}",
                            offlineSQL.getIndicatorCode(), e);
                }
            }

            // Zip the generated directory; close the stream even on failure.
            File fileZip = new File(path + ".zip");
            try (FileOutputStream fos = new FileOutputStream(fileZip)) {
                PyFileCreateUtil.toZip(path, fos, true);
            }
            return fileZip;
        } finally {
            // Remove the working directory in all cases.
            FileSystemUtils.deleteRecursively(new File(path));
        }
    }

    /** Renders the .py file and one .flow file per cycle for a single record. */
    private void doRenderProjectFiles(AzkanbanOfflineSQL offlineSQL, String projectName, String path)
            throws IOException, TemplateException {
        JSONObject parseObject = JSONObject.parseObject(offlineSQL.getExtParas());

        // Variables substituted into the base python template.
        Map<String, Object> dataMap = new HashMap<>(16);
        dataMap.put("PROJECT_NAME", projectName);
        dataMap.put("INDICATOR_CODE", offlineSQL.getIndicatorCode());
        dataMap.put("JOB_NAME", parseObject.getString(BasePyTemplateVariableEnum.JOB_NAME.getAttribute()));
        dataMap.put("INSERT_KUDU_SQL", parseObject.getString(BasePyTemplateVariableEnum.INSERT_KUDU_SQL.getAttribute()));
        dataMap.put("KUDU_TO_KAFKA_SQL", parseObject.getString(BasePyTemplateVariableEnum.KUDU_TO_KAFKA_SQL.getAttribute()));
        dataMap.put("ES_INDEX", parseObject.getString(BasePyTemplateVariableEnum.ES_INDEX.getAttribute()));
        dataMap.put("GET_START_TIME", parseObject.getString(BasePyTemplateVariableEnum.GET_START_TIME.getAttribute()));
        dataMap.put("GET_END_TIME", parseObject.getString(BasePyTemplateVariableEnum.GET_END_TIME.getAttribute()));

        PyFileCreateUtil.createFile("BasePyTemplate.ftl", path + "/py/", offlineSQL.getIndicatorCode() + ".py", dataMap);

        JSONObject paras = toParasMap(parseObject.getString(BasePyTemplateVariableEnum.CONFIG_EXT_PARAS.getAttribute()));

        // One .flow file per cycle=cron pair.
        for (String cycleCorn : offlineSQL.getIndicatorCycleCorn().split(",")) {
            String[] split = cycleCorn.split("=");

            Map<String, Object> flowMap = new HashMap<>(16);
            // Literal ${...} placeholders are resolved by Azkaban at run time.
            flowMap.put("END_TIME", "${endTime}");
            flowMap.put("EXT_PARAS", "${extParas}");
            flowMap.put("START_TIME", "${startTime}");
            flowMap.put("AZKABAN", "${azkaban.flow.execid}");
            flowMap.put("INDICATOR_CODE", offlineSQL.getIndicatorCode());
            flowMap.put("CONFIG_EXT_PARAS", doGetParas(paras, split[0]));
            PyFileCreateUtil.createFile("BaseFlowTemplate.ftl", path + "/", offlineSQL.getIndicatorCode() + "_" + split[0] + ".flow", flowMap);
        }

        // Copy the shared base files into the project directory.
        PyFileCreateUtil.addBaseFile(BASE_RESOURCE, path);
    }

    /**
     * Parses the per-cycle extra-parameter JSON string; blank input yields an
     * empty JSON object instead of null.
     */
    private JSONObject toParasMap(String string) {
        return StringUtils.isEmpty(string) ? new JSONObject() : JSONObject.parseObject(string);
    }


    /**
     * Returns the configured extra parameters for a cycle, falling back to
     * {@code "indicator_cycle=<cycle>"} when none are configured.
     */
    private String doGetParas(JSONObject paras, String indicatorCycle) {
        if (paras.containsKey(indicatorCycle)) {
            return paras.getString(indicatorCycle);
        }
        return "indicator_cycle=" + indicatorCycle;
    }

    /**
     * Inserts a brand-new record, rejecting duplicates by indicator code, and
     * snapshots the resulting batch into the log table.
     *
     * @param offlineSQL new record to insert
     * @param userName   operator recorded in the batch log
     * @param history    existing records of the project; mutated in place
     * @throws BizException if the indicator code already exists in the project
     */
    private void doInsertOfflineSQL(AzkanbanOfflineSQL offlineSQL, String userName, List<AzkanbanOfflineSQL> history) {
        // Reject a second record with the same indicator code.
        boolean duplicated = history.stream()
                .anyMatch(existing -> existing.getIndicatorCode().equals(offlineSQL.getIndicatorCode()));
        if (duplicated) {
            throw new BizException("指标已存在，请修改");
        }

        history.add(offlineSQL);
        offlineSQL.setDeleted(0);
        offlineSQL.setCreatedTime(System.currentTimeMillis());
        offlineSQL.setUpdatedTime(offlineSQL.getCreatedTime());

        azkanbanOfflineSQLMapper.insert(offlineSQL);

        // Snapshot the resulting batch into the log table.
        doInsertBatchLog(offlineSQL, userName, history);
    }

    // Loads all offline-SQL records registered under the given project.
    // (Method name keeps its original misspelling to preserve internal call sites.)
    private List<AzkanbanOfflineSQL> doQueryOfflineSQLLList(String projectName) {
        return azkanbanOfflineSQLMapper.queryListByName(projectName);
    }
}
