package com.sh.data.engine.domain.datadev.flink.service.impl;

import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.google.common.collect.Lists;
import com.sh.data.engine.common.enumDefinition.FlinkNodeTypeEnum;
import com.sh.data.engine.common.exception.BusinessException;
import com.sh.data.engine.common.util.DateUtil;
import com.sh.data.engine.common.util.FlinkTaskScheduleCycleUtil;
import com.sh.data.engine.domain.datadev.flink.model.domain.FlinkJobDomain;
import com.sh.data.engine.domain.datadev.flink.model.domain.FlinkScheduleConfigDomain;
import com.sh.data.engine.domain.datadev.flink.model.domain.FlinkScheduleDayRangeDomain;
import com.sh.data.engine.domain.datadev.flink.model.domain.FlinkSqlTaskDomain;
import com.sh.data.engine.domain.datadev.flink.model.param.StartFlinkTaskParam;
import com.sh.data.engine.domain.datadev.flink.service.*;
import com.sh.data.engine.domain.datadev.flinkTaskAboutExexte.FlinkSqlExcService;
import com.sh.data.engine.domain.util.ConvertUtil;
import com.sh.data.engine.repository.dao.datadev.entity.FlinkNodeConfigEntity;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.Date;
import java.util.List;
import java.util.Objects;

/**
 * Service implementation for Flink SQL task node configuration.
 *
 * <p>Handles saving the individual configuration items of a Flink SQL task
 * (script / source / sink / dim / env / schedule), publishing and un-publishing
 * tasks, version history and rollback, and recomputing the next scheduled
 * start/stop time of a task.
 *
 * @author jingchen.hr
 * @since 2022-06-11
 */
@Service
@Slf4j
public class FlinkSqlTaskServiceImpl implements FlinkSqlTaskService {

    @Autowired
    private FlinkService flinkService;

    @Autowired
    private FlinkNodeConfigService flinkNodeConfigService;

    // @Lazy breaks the circular dependency between this service and the
    // execution / job services that call back into it.
    @Lazy
    @Autowired
    private FlinkTaskV1Service flinkTaskV1Service;

    @Lazy
    @Autowired
    private FlinkSqlExcService flinkSqlExcService;

    @Lazy
    @Autowired
    private FlinkJobService flinkJobService;

    // Keys identifying which configuration item a save operation targets.
    private static final String ITEM_SCRIPT = "script";
    private static final String ITEM_SOURCE = "source";
    private static final String ITEM_RESULT = "result";
    private static final String ITEM_DIM = "dim";
    private static final String ITEM_ENV = "env";
    private static final String ITEM_SCHEDULE = "schedule";

    /** Pattern used when expanding effective-date ranges to full timestamps. */
    private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";

    /**
     * Intentionally a no-op: node deletion is handled elsewhere.
     *
     * @see com.xuelangyun.hufu.service.datadev.flink.impl.FlinkNodeServiceImpl#deleteNode
     */
    @Override
    public void delete(Long nodeId) {
        // This method may no longer be needed; deletion lives in FlinkNodeServiceImpl.deleteNode.
    }

    /**
     * Creates a baseline (unpublished, active) configuration record for a node
     * if — and only if — the node has no active configuration yet.
     *
     * @param nodeId    node to initialize
     * @param userId    creator/updater id recorded on the new row
     * @param projectId owning project
     */
    @Override
    public void saveBaseConfig(Long nodeId, String userId, Long projectId) {
        if (null != getActiveByNodeId(nodeId)) {
            // An active configuration already exists; nothing to create.
            return;
        }
        Date now = new Date();
        FlinkNodeConfigEntity insert = new FlinkNodeConfigEntity();
        insert.setProjectId(projectId);
        insert.setNodeId(nodeId);
        insert.setIsPublished(0);
        insert.setCreatorId(userId);
        insert.setUpdaterId(userId);
        insert.setIsActive(1);
        insert.setCreateTime(now);
        insert.setUpdateTime(now);
        insert.setEnvConfig(flinkService.getDefaultEnv());
        flinkNodeConfigService.save(insert);
    }

    @Override
    public void saveScript(Long nodeId, String script, String userId, Long projectId) {
        saveConfig(nodeId, userId, projectId, script, ITEM_SCRIPT);
    }

    @Override
    public void saveSource(Long nodeId, String sourceConfig, String userId, Long projectId) {
        saveConfig(nodeId, userId, projectId, sourceConfig, ITEM_SOURCE);
    }

    /**
     * Updates only the source config of the node's active version in place,
     * without any publish/version bookkeeping.
     *
     * <p>Bug fixes versus the previous revision: the built {@code SqlTaskConfig}
     * was never attached to the entity (the source config was silently dropped),
     * and a missing active config caused an NPE. The merge now goes through
     * {@link #saveConfig(FlinkNodeConfigEntity, String, String)} so the other
     * config items stored on the row are preserved.
     */
    @Override
    public void saveSourceOnly(Long nodeId, String sourceConfig, String userId) {
        final FlinkSqlTaskDomain taskDomain = getActiveByNodeId(nodeId);
        if (null == taskDomain) {
            // No active configuration to update for this node.
            return;
        }

        FlinkNodeConfigEntity entity = new FlinkNodeConfigEntity();
        entity.setId(taskDomain.getId());
        entity.setUpdaterId(userId);
        entity.setUpdateTime(new Date());
        // Merge the new source config into the stored SqlTaskConfig and attach it.
        saveConfig(entity, sourceConfig, ITEM_SOURCE);
        flinkNodeConfigService.updateById(entity);
    }

    @Override
    public void saveSink(Long nodeId, String sinkConfig, String userId, Long projectId) {
        saveConfig(nodeId, userId, projectId, sinkConfig, ITEM_RESULT);
    }

    @Override
    public void saveDim(Long nodeId, String dimConfig, String userId, Long projectId) {
        saveConfig(nodeId, userId, projectId, dimConfig, ITEM_DIM);
    }

    @Override
    public void saveEnv(Long nodeId, String envConfig, String userId, Long projectId) {
        saveConfig(nodeId, userId, projectId, envConfig, ITEM_ENV);
    }

    @Override
    public void saveSchedule(Long nodeId, String scheduleConfig, String userId, Long projectId) {
        saveConfig(nodeId, userId, projectId, scheduleConfig, ITEM_SCHEDULE);
    }

    /**
     * Recomputes and persists the next start/stop time of the node's active
     * configuration from its schedule settings.
     *
     * <p>Silently returns when the node has no active configuration, no schedule
     * config, or no effective-date ranges (previously these cases raised NPEs).
     *
     * @param nodeId       node whose schedule is recalculated
     * @param includeToday whether today may qualify as the next start day
     */
    @Override
    public void reCalculateNextStartAndStopTime(Long nodeId, boolean includeToday) {
        final FlinkSqlTaskDomain domain = getActiveByNodeId(nodeId);
        if (null == domain) {
            return;
        }
        final FlinkScheduleConfigDomain scheduleConfigDomain = domain.getScheduleConfigDomain();
        if (null == scheduleConfigDomain) {
            return;
        }
        final List<FlinkScheduleDayRangeDomain> rangeList = scheduleConfigDomain.getRangeList();
        if (null == rangeList) {
            return;
        }

        // Expand each effective-date range into a [dayStart, dayEnd] timestamp pair.
        List<ImmutablePair<Date, Date>> effectiveDateStartEndPairList = Lists.newLinkedList();
        for (FlinkScheduleDayRangeDomain range : rangeList) {
            effectiveDateStartEndPairList.add(
                new ImmutablePair<>(
                    DateUtil.toDate(range.getEffectiveDateStart() + " 00:00:00", DATE_FORMAT),
                    DateUtil.toDate(range.getEffectiveDateEnd() + " 23:59:59", DATE_FORMAT)));
        }

        final Date nextStartTime =
            FlinkTaskScheduleCycleUtil.getNextStartTime(
                scheduleConfigDomain.getEffectDayType(),
                effectiveDateStartEndPairList,
                scheduleConfigDomain.getStartTimeHour(),
                scheduleConfigDomain.getStartTimeMin(),
                includeToday);

        final Date nextStopTime =
            FlinkTaskScheduleCycleUtil.getNextStopTime(
                scheduleConfigDomain.getEffectDayType(),
                effectiveDateStartEndPairList,
                nextStartTime,
                scheduleConfigDomain.getStartTimeHour(),
                scheduleConfigDomain.getStartTimeMin(),
                scheduleConfigDomain.getEndTimeHour(),
                scheduleConfigDomain.getEndTimeMin());

        FlinkNodeConfigEntity entity = new FlinkNodeConfigEntity();
        entity.setId(domain.getId());
        entity.setNextExecTime(nextStartTime);
        entity.setNextStopTime(nextStopTime);
        entity.setIsActive(1);
        flinkNodeConfigService.updateById(entity);
    }

    /**
     * Loads a configuration by primary key.
     *
     * <p>NOTE(review): preserves the historical contract of returning an empty
     * (uninitialized) domain object — never {@code null} — when the id does not
     * exist; callers appear to rely on this.
     */
    @Override
    public FlinkSqlTaskDomain getById(Long id) {
        final FlinkNodeConfigEntity entity = flinkNodeConfigService.getById(id);
        FlinkSqlTaskDomain domain = new FlinkSqlTaskDomain();
        if (Objects.nonNull(entity)) {
            domain.init(entity);
        }
        return domain;
    }

    /**
     * Returns the node's active configuration as a domain object, or
     * {@code null} when the node has no active configuration.
     */
    @Override
    public FlinkSqlTaskDomain getActiveByNodeId(Long nodeId) {
        FlinkNodeConfigEntity entity = flinkNodeConfigService.getActiveResourceByNodeId(nodeId);
        if (Objects.isNull(entity)) {
            return null;
        }
        FlinkSqlTaskDomain domain = new FlinkSqlTaskDomain();
        domain.init(entity);
        return domain;
    }

    /** Not implemented yet; kept to satisfy the interface. Always returns {@code null}. */
    @Override
    public FlinkNodeConfigEntity getActiveEntityByNodeId(Long nodeId) {
        return null;
    }

    /** @return all SQL tasks currently eligible to be started. */
    @Override
    public List<FlinkSqlTaskDomain> getAllRunnableTaskList() {
        return toDomainList(flinkNodeConfigService.getRunnableTaskList(FlinkNodeTypeEnum.TASK_SQL));
    }

    /** @return every SQL task regardless of state. */
    @Override
    public List<FlinkSqlTaskDomain> getAllTaskList() {
        return toDomainList(flinkNodeConfigService.getAllTaskList(FlinkNodeTypeEnum.TASK_SQL));
    }

    /** @return all SQL tasks currently eligible to be stopped. */
    @Override
    public List<FlinkSqlTaskDomain> getAllStoppableTaskList() {
        return toDomainList(flinkNodeConfigService.getStopedTaskList(FlinkNodeTypeEnum.TASK_SQL));
    }

    /**
     * Publishes (status=1) or un-publishes (any other status) the node's active
     * configuration, then starts or stops the corresponding Flink job.
     *
     * @param nodeId node to (un)publish
     * @param status target publish status; 1 means publish
     * @param userId acting user, recorded as publisher on publish
     * @param remark free-form remark stored on the version
     * @throws BusinessException when the node was never saved or has no script
     */
    @Override
    public void publish(Long nodeId, Integer status, String userId, String remark) {
        final FlinkSqlTaskDomain flinkSqlTaskDomain = this.getActiveByNodeId(nodeId);
        if (null == flinkSqlTaskDomain) {
            throw new BusinessException("请先保存后再发布");
        }
        if (StringUtils.isBlank(flinkSqlTaskDomain.getScriptContent())) {
            throw new BusinessException("请先保存脚本信息");
        }
        if (Objects.equals(flinkSqlTaskDomain.getIsPublished(), status)) {
            // Already in the requested state; nothing to do.
            return;
        }

        FlinkNodeConfigEntity flinkNodeConfigEntity = new FlinkNodeConfigEntity();
        flinkNodeConfigEntity.setId(flinkSqlTaskDomain.getId());
        flinkNodeConfigEntity.setIsPublished(status);
        flinkNodeConfigEntity.setRemark(remark);
        flinkNodeConfigEntity.setIsActive(1);

        // Update targets the node's single active version.
        UpdateWrapper<FlinkNodeConfigEntity> wrapper = Wrappers.update();
        wrapper
            .lambda()
            .eq(FlinkNodeConfigEntity::getNodeId, nodeId)
            .eq(FlinkNodeConfigEntity::getIsActive, 1);

        if (Objects.equals(status, 1)) {
            flinkNodeConfigEntity.setPublishTime(new Date());
            flinkNodeConfigEntity.setPublisherId(userId);
            flinkNodeConfigService.update(flinkNodeConfigEntity, wrapper);

            StartFlinkTaskParam startFlinkTaskParam = new StartFlinkTaskParam();
            startFlinkTaskParam.setExecType(2);
            startFlinkTaskParam.setNodeId(nodeId);
            startFlinkTaskParam.setUserId(userId);
            flinkSqlExcService.clientJobStart(startFlinkTaskParam);
        } else {
            flinkNodeConfigEntity.setPublishTime(null);
            flinkNodeConfigEntity.setPublisherId(null);
            flinkNodeConfigService.update(flinkNodeConfigEntity, wrapper);

            FlinkJobDomain flinkJobDomain = flinkJobService.getByNodeId(nodeId, 2);
            // The job may already be gone; guard against NPE and just log.
            if (null != flinkJobDomain) {
                flinkTaskV1Service.stopFlinkJob(flinkJobDomain.getId(), 1);
            } else {
                log.warn("unpublish: no flink job found for nodeId={}, skip stop", nodeId);
            }
        }
    }

    /**
     * Pages over the version history of a node's SQL task configuration.
     */
    @Override
    public IPage<FlinkSqlTaskDomain> getVersions(Long nodeId, Integer pageNum, Integer pageSize) {
        IPage<FlinkNodeConfigEntity> entityPage =
            flinkNodeConfigService.getVersionsTaskList(
                FlinkNodeTypeEnum.TASK_SQL, nodeId, pageNum, pageSize);
        return entityPage.convert(this::nodeConfigToSqlTask);
    }

    /** Converts a persistence entity into its domain representation. */
    private FlinkSqlTaskDomain nodeConfigToSqlTask(FlinkNodeConfigEntity flinkNodeConfigEntity) {
        FlinkSqlTaskDomain flinkSqlTaskDomain = new FlinkSqlTaskDomain();
        flinkSqlTaskDomain.init(flinkNodeConfigEntity);
        return flinkSqlTaskDomain;
    }

    /** Converts a list of entities to domain objects; shared by the list queries. */
    private List<FlinkSqlTaskDomain> toDomainList(List<FlinkNodeConfigEntity> entities) {
        List<FlinkSqlTaskDomain> domains = Lists.newArrayList();
        entities.forEach(e -> domains.add(nodeConfigToSqlTask(e)));
        return domains;
    }

    /**
     * Rolls the node back to a historical version: deactivates the current
     * active version (if any) and re-activates the target version.
     *
     * @param nodeId    node being rolled back
     * @param versionId id of the historical configuration row to activate
     * @throws BusinessException when the target version does not exist
     */
    @Override
    @Transactional(rollbackFor = Exception.class, transactionManager = "engineTransactionManager")
    public void rollbackVersion(Long nodeId, Long versionId) {
        // Validate the target first so we never deactivate without re-activating.
        FlinkNodeConfigEntity target = flinkNodeConfigService.getById(versionId);
        if (null == target) {
            throw new BusinessException("回滚失败：目标版本不存在");
        }

        LambdaQueryWrapper<FlinkNodeConfigEntity> activeQuery =
            new LambdaQueryWrapper<FlinkNodeConfigEntity>()
                .eq(FlinkNodeConfigEntity::getNodeId, nodeId)
                .eq(FlinkNodeConfigEntity::getIsActive, 1);

        // getOne(..., false) returns null rather than throwing on 0 rows.
        FlinkNodeConfigEntity current = flinkNodeConfigService.getOne(activeQuery, false);
        if (null != current) {
            current.setIsActive(0);
            flinkNodeConfigService.updateById(current);
        }

        target.setIsActive(1);
        flinkNodeConfigService.updateById(target);
    }

    /**
     * Not implemented: would list the tasks referencing a datasource so the
     * datasource-deletion flow can verify it is unused. Always returns
     * {@code null} for now.
     */
    @Override
    public List<FlinkSqlTaskDomain> getTaskByDsId(Integer dsId) {
        return null;
    }

    /**
     * Core save routine for a single configuration item.
     *
     * <p>Versioning rules:
     * <ul>
     *   <li>No active version → insert a brand-new unpublished active version.</li>
     *   <li>Active version is published → freeze it as history (isActive=0) and
     *       fork a new unpublished active copy carrying the edit.</li>
     *   <li>Active version is unpublished → update it in place.</li>
     * </ul>
     */
    private void saveConfig(Long nodeId, String userId, Long projectId, String content, String item) {
        if (null == content) {
            // Nothing to save; also avoids an unnecessary DB round-trip.
            return;
        }
        final FlinkSqlTaskDomain taskDomain = getActiveByNodeId(nodeId);

        FlinkNodeConfigEntity entity = new FlinkNodeConfigEntity();
        if (null != taskDomain && Objects.equals(1, taskDomain.getIsPublished())) {
            // Freeze the published version as an inactive history record.
            FlinkNodeConfigEntity update = new FlinkNodeConfigEntity();
            update.setId(taskDomain.getId());
            update.setIsPublished(1);
            update.setIsActive(0);
            update.setUpdateTime(new Date());
            flinkNodeConfigService.updateById(update);

            // Fork: copy the published version's data into a fresh row.
            entity = ConvertUtil.copyProperties(taskDomain, FlinkNodeConfigEntity.class);
            FlinkNodeConfigEntity.SqlTaskConfig sqlTaskConfig = new FlinkNodeConfigEntity.SqlTaskConfig();
            sqlTaskConfig.setSinkConfig(taskDomain.getSinkConfig());
            sqlTaskConfig.setSourceConfig(taskDomain.getSourceConfig());
            sqlTaskConfig.setDimConfig(taskDomain.getDimConfig());
            sqlTaskConfig.setScriptContent(taskDomain.getScriptContent());
            FlinkNodeConfigEntity.FlinkScheduleConfig flinkScheduleConfig =
                JSON.parseObject(
                    taskDomain.getScheduleConfig(), FlinkNodeConfigEntity.FlinkScheduleConfig.class);
            entity.setSqlTaskConfig(sqlTaskConfig);
            entity.setScheduleConfig(flinkScheduleConfig);
            // Clear the copied id so the save below performs an insert.
            entity.setId(null);
        }

        if (null == taskDomain || Objects.equals(1, taskDomain.getIsPublished())) {
            // Insert the brand-new (or forked) unpublished active version.
            entity.setProjectId(projectId);
            entity.setNodeId(nodeId);
            entity.setIsPublished(0);
            entity.setCreatorId(userId);
            entity.setUpdaterId(userId);
            entity.setIsActive(1);
            entity.setCreateTime(new Date());
            entity.setUpdateTime(new Date());
            entity.setNodeType(FlinkNodeTypeEnum.TASK_SQL.getCode());
            saveConfig(entity, content, item);
            if (null == taskDomain) {
                // Brand-new nodes start with the platform default environment.
                entity.setEnvConfig(flinkService.getDefaultEnv());
            }
            flinkNodeConfigService.save(entity);
            return;
        }

        // Active unpublished version: update the single changed item in place.
        FlinkNodeConfigEntity originEntity = new FlinkNodeConfigEntity();
        originEntity.setId(taskDomain.getId());
        saveConfig(originEntity, content, item);
        originEntity.setIsActive(taskDomain.getIsActive());
        originEntity.setIsPublished(taskDomain.getIsPublished());
        originEntity.setUpdaterId(userId);
        originEntity.setUpdateTime(new Date());
        flinkNodeConfigService.updateById(originEntity);
    }

    /**
     * Applies one configuration item to {@code entity}, merging into the
     * stored {@code SqlTaskConfig} when the entity refers to an existing row so
     * the untouched items are preserved.
     *
     * @param entity  target entity (id may be null for a new row)
     * @param content raw item payload; schedule content is parsed as JSON
     * @param item    one of the ITEM_* keys
     */
    private void saveConfig(FlinkNodeConfigEntity entity, String content, String item) {
        FlinkNodeConfigEntity.SqlTaskConfig sqlTaskConfig = entity.getSqlTaskConfig();
        if (Objects.isNull(sqlTaskConfig)) {
            sqlTaskConfig = new FlinkNodeConfigEntity.SqlTaskConfig();
        }
        if (Objects.nonNull(entity.getId())) {
            // Guard against the row disappearing between read and write.
            FlinkNodeConfigEntity stored = flinkNodeConfigService.getById(entity.getId());
            if (Objects.nonNull(stored) && Objects.nonNull(stored.getSqlTaskConfig())) {
                sqlTaskConfig = stored.getSqlTaskConfig();
            }
        }
        switch (item) {
            case ITEM_SCRIPT:
                sqlTaskConfig.setScriptContent(content);
                break;
            case ITEM_RESULT:
                sqlTaskConfig.setSinkConfig(content);
                break;
            case ITEM_DIM:
                sqlTaskConfig.setDimConfig(content);
                break;
            case ITEM_ENV:
                // Env lives on the entity itself, not inside SqlTaskConfig.
                entity.setEnvConfig(content);
                break;
            case ITEM_SOURCE:
                sqlTaskConfig.setSourceConfig(content);
                break;
            case ITEM_SCHEDULE:
                entity.setScheduleConfig(
                    JSON.parseObject(content, FlinkNodeConfigEntity.FlinkScheduleConfig.class));
                break;
            default:
                // Unknown item keys are ignored, matching the previous behavior.
                break;
        }
        entity.setSqlTaskConfig(sqlTaskConfig);
    }
}
