package com.wande.dataplatform.service.impl;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.wande.common.core.exception.ServiceException;
import com.wande.common.mybatis.core.page.PageQuery;
import com.wande.common.mybatis.core.page.TableDataInfo;
import com.wande.common.satoken.utils.LoginHelper;
import com.wande.dataplatform.common.enums.DataPlatformErrorCode;
import com.wande.dataplatform.config.DolphinSchedulerProperties;
import com.wande.dataplatform.domain.*;
import com.wande.dataplatform.domain.dto.DsSyncConfigDTO;
import com.wande.dataplatform.domain.vo.DsTaskMappingVO;
import com.wande.dataplatform.mapper.DsSyncConfigMapper;
import com.wande.dataplatform.mapper.DsSyncLogMapper;
import com.wande.dataplatform.mapper.DsTaskMappingMapper;
import com.wande.dataplatform.mapper.EtlTaskMapper;
import com.wande.dataplatform.service.IDolphinSchedulerSyncService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.*;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.client.RestTemplate;

import java.time.LocalDateTime;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * DolphinScheduler synchronization service implementation.
 *
 * <p>Bridges platform-side ETL tasks with DolphinScheduler workflows: manages sync
 * configurations, triggers manual/scheduled synchronization, maintains task mappings
 * ({@code DsTaskMapping}), and records every sync attempt in {@code DsSyncLog}.
 * Remote calls go through {@link RestTemplate} against the DolphinScheduler REST API
 * configured in {@link DolphinSchedulerProperties}.
 *
 * @author DataPlatform
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class DolphinSchedulerSyncServiceImpl implements IDolphinSchedulerSyncService {

    private final DsSyncConfigMapper syncConfigMapper;
    private final DsSyncLogMapper syncLogMapper;
    private final DsTaskMappingMapper taskMappingMapper;
    private final EtlTaskMapper etlTaskMapper;
    private final DolphinSchedulerProperties dsProperties;
    private final RestTemplate restTemplate;

    /**
     * Creates a new sync configuration in READY state.
     *
     * @param dto incoming configuration payload
     * @return primary key of the newly inserted configuration
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Long createSyncConfig(DsSyncConfigDTO dto) {
        DsSyncConfig config = BeanUtil.copyProperties(dto, DsSyncConfig.class);
        config.setSyncStatus("READY");
        syncConfigMapper.insert(config);
        return config.getId();
    }

    /**
     * Updates an existing sync configuration, preserving its id and tenant.
     *
     * @param dto payload carrying the id of the configuration to update
     * @return {@code true} if exactly one row was updated
     * @throws ServiceException if no configuration exists for {@code dto.getId()}
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Boolean updateSyncConfig(DsSyncConfigDTO dto) {
        DsSyncConfig config = syncConfigMapper.selectById(dto.getId());
        if (config == null) {
            throw new ServiceException("同步配置不存在");
        }
        // id and tenantId are immutable once created — never overwrite them from the DTO.
        BeanUtil.copyProperties(dto, config, "id", "tenantId");
        return syncConfigMapper.updateById(config) > 0;
    }

    /**
     * Deletes a sync configuration by id.
     *
     * @param id configuration primary key
     * @return {@code true} if a row was actually deleted
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Boolean deleteSyncConfig(Long id) {
        return syncConfigMapper.deleteById(id) > 0;
    }

    /**
     * Pages through sync configurations, newest first.
     *
     * @param pageQuery pagination parameters
     * @return one page of configurations wrapped in {@link TableDataInfo}
     */
    @Override
    public TableDataInfo<DsSyncConfig> querySyncConfigList(PageQuery pageQuery) {
        LambdaQueryWrapper<DsSyncConfig> wrapper = Wrappers.lambdaQuery();
        wrapper.orderByDesc(DsSyncConfig::getCreateTime);

        Page<DsSyncConfig> page = syncConfigMapper.selectPage(pageQuery.build(), wrapper);
        return TableDataInfo.build(page);
    }

    /**
     * Manually triggers one synchronization run for the given configuration.
     *
     * <p>Dispatches on the configuration's sync type (WORKFLOW/TASK/INSTANCE), updates
     * the configuration's status/last-sync-time, and always persists a {@code DsSyncLog}
     * row in the {@code finally} block — success or failure.
     *
     * <p>NOTE(review): on failure the rethrown exception rolls back this transaction,
     * which also rolls back the sync-log insert and the FAILED status update. If the
     * failure record must survive, the log insert needs {@code REQUIRES_NEW} — confirm
     * intended semantics.
     *
     * @param configId sync configuration primary key
     * @return {@code true} on successful synchronization
     * @throws ServiceException if the configuration is missing, the sync type is
     *                          unsupported, or the underlying sync fails
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Boolean triggerSync(Long configId) {
        DsSyncConfig config = syncConfigMapper.selectById(configId);
        if (config == null) {
            throw new ServiceException("同步配置不存在");
        }

        // Named syncLog (not "log") to avoid shadowing the @Slf4j logger field.
        DsSyncLog syncLog = new DsSyncLog();
        syncLog.setTenantId(config.getTenantId());
        syncLog.setConfigId(configId);
        syncLog.setSyncType(config.getSyncType());
        syncLog.setSyncAction("MANUAL_SYNC");
        syncLog.setStartTime(LocalDateTime.now());
        syncLog.setSyncStatus("RUNNING");

        try {
            // Dispatch to the appropriate sync routine by configured type.
            switch (config.getSyncType()) {
                case "WORKFLOW":
                    syncAllWorkflows();
                    break;
                case "TASK":
                    syncAllTasks();
                    break;
                case "INSTANCE":
                    syncAllInstances();
                    break;
                default:
                    throw new ServiceException("不支持的同步类型: " + config.getSyncType());
            }

            syncLog.setSyncStatus("SUCCESS");
            syncLog.setEndTime(LocalDateTime.now());
            syncLog.setDuration(elapsedMillis(syncLog));

            // Record the successful run on the configuration itself.
            config.setLastSyncTime(LocalDateTime.now());
            config.setSyncStatus("SUCCESS");
            syncConfigMapper.updateById(config);

            return true;
        } catch (Exception e) {
            log.error("触发同步失败: configId={}", configId, e);

            syncLog.setSyncStatus("FAILED");
            syncLog.setErrorMessage(e.getMessage());
            syncLog.setEndTime(LocalDateTime.now());
            syncLog.setDuration(elapsedMillis(syncLog));

            config.setSyncStatus("FAILED");
            config.setErrorMessage(e.getMessage());
            syncConfigMapper.updateById(config);

            throw new ServiceException("同步失败: " + e.getMessage());
        } finally {
            // Persisted on both paths; see NOTE(review) above about rollback on failure.
            syncLogMapper.insert(syncLog);
        }
    }

    /**
     * Creates a DolphinScheduler workflow for an ETL task and records the mapping.
     *
     * <p>Idempotent: if a mapping for the given ETL task already exists, its id is
     * returned without calling DolphinScheduler again.
     *
     * @param etlTaskId platform ETL task primary key
     * @return id of the (existing or newly created) {@code DsTaskMapping} row
     * @throws ServiceException if the ETL task is missing or the remote creation fails
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Long createEtlTaskMapping(Long etlTaskId) {
        EtlTask etlTask = etlTaskMapper.selectById(etlTaskId);
        if (etlTask == null) {
            throw new ServiceException("ETL任务不存在");
        }

        // Short-circuit if a mapping already exists for this ETL task.
        LambdaQueryWrapper<DsTaskMapping> wrapper = Wrappers.lambdaQuery();
        wrapper.eq(DsTaskMapping::getPlatformTaskId, etlTaskId);
        wrapper.eq(DsTaskMapping::getPlatformTaskType, "ETL");
        DsTaskMapping existingMapping = taskMappingMapper.selectOne(wrapper);
        if (existingMapping != null) {
            return existingMapping.getId();
        }

        // Named syncLog (not "log") to avoid shadowing the @Slf4j logger field.
        DsSyncLog syncLog = new DsSyncLog();
        syncLog.setTenantId(etlTask.getTenantId());
        syncLog.setSyncType("WORKFLOW");
        syncLog.setSyncAction("CREATE");
        syncLog.setSourceId(etlTaskId);
        syncLog.setStartTime(LocalDateTime.now());

        try {
            // Remote call: create the workflow definition in DolphinScheduler.
            Long workflowCode = createDolphinSchedulerWorkflow(etlTask);

            // Persist the platform-task ↔ DS-workflow mapping.
            DsTaskMapping mapping = new DsTaskMapping();
            mapping.setTenantId(etlTask.getTenantId());
            mapping.setPlatformTaskId(etlTaskId);
            mapping.setPlatformTaskType("ETL");
            mapping.setPlatformTaskName(etlTask.getName());
            mapping.setDsProjectCode(dsProperties.getProjectCode());
            mapping.setDsWorkflowCode(workflowCode);
            mapping.setDsWorkflowName("ETL_" + etlTask.getName());
            mapping.setMappingStatus("ACTIVE");
            mapping.setSyncStatus("SYNCED");
            mapping.setLastSyncTime(LocalDateTime.now());

            taskMappingMapper.insert(mapping);

            syncLog.setTargetId(workflowCode);
            syncLog.setSyncStatus("SUCCESS");
            syncLog.setEndTime(LocalDateTime.now());
            syncLog.setDuration(elapsedMillis(syncLog));

            return mapping.getId();
        } catch (Exception e) {
            log.error("创建工作流映射失败: etlTaskId={}", etlTaskId, e);
            syncLog.setSyncStatus("FAILED");
            syncLog.setErrorMessage(e.getMessage());
            syncLog.setEndTime(LocalDateTime.now());
            syncLog.setDuration(elapsedMillis(syncLog));
            throw new ServiceException("创建工作流映射失败: " + e.getMessage());
        } finally {
            syncLogMapper.insert(syncLog);
        }
    }

    /**
     * Refreshes the mapping row for a DolphinScheduler workflow by querying its
     * process definition remotely.
     *
     * @param workflowCode DolphinScheduler process-definition code
     * @return {@code true} if the remote query returned HTTP 200 with a body;
     *         {@code false} on non-OK response or any exception (logged, not rethrown)
     */
    @Override
    public Boolean syncWorkflowStatus(Long workflowCode) {
        try {
            String url = dsProperties.getApiUrl() + "/projects/" + dsProperties.getProjectCode()
                + "/process-definition/" + workflowCode;

            HttpHeaders headers = createHeaders();
            HttpEntity<String> entity = new HttpEntity<>(headers);

            ResponseEntity<Map> response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class);

            if (response.getStatusCode() == HttpStatus.OK && response.getBody() != null) {
                // Mark the local mapping as freshly synced.
                LambdaQueryWrapper<DsTaskMapping> wrapper = Wrappers.lambdaQuery();
                wrapper.eq(DsTaskMapping::getDsWorkflowCode, workflowCode);
                DsTaskMapping mapping = taskMappingMapper.selectOne(wrapper);

                if (mapping != null) {
                    mapping.setSyncStatus("SYNCED");
                    mapping.setLastSyncTime(LocalDateTime.now());
                    taskMappingMapper.updateById(mapping);
                }

                return true;
            }
            return false;
        } catch (Exception e) {
            log.error("同步工作流状态失败: {}", e.getMessage(), e);
            return false;
        }
    }

    /**
     * Queries a DolphinScheduler process instance's status and records the raw
     * response payload as a sync-log entry.
     *
     * @param instanceId DolphinScheduler process-instance id
     * @return {@code true} if the remote query returned HTTP 200 with a body;
     *         {@code false} on non-OK response or any exception (logged, not rethrown)
     */
    @Override
    public Boolean syncInstanceStatus(Long instanceId) {
        try {
            String url = dsProperties.getApiUrl() + "/projects/" + dsProperties.getProjectCode()
                + "/process-instances/" + instanceId;

            HttpHeaders headers = createHeaders();
            HttpEntity<String> entity = new HttpEntity<>(headers);

            ResponseEntity<Map> response = restTemplate.exchange(url, HttpMethod.GET, entity, Map.class);

            if (response.getStatusCode() == HttpStatus.OK && response.getBody() != null) {
                Map<String, Object> data = (Map<String, Object>) response.getBody().get("data");

                // Named syncLog (not "log") to avoid shadowing the @Slf4j logger field.
                DsSyncLog syncLog = new DsSyncLog();
                syncLog.setTenantId(Long.valueOf(LoginHelper.getTenantId()));
                syncLog.setSyncType("INSTANCE");
                syncLog.setSyncAction("SYNC_STATUS");
                syncLog.setTargetId(instanceId);
                syncLog.setSyncStatus("SUCCESS");
                syncLog.setSyncResult(JSONUtil.toJsonStr(data));
                syncLog.setStartTime(LocalDateTime.now());
                syncLog.setEndTime(LocalDateTime.now());
                syncLog.setDuration(0L);
                syncLogMapper.insert(syncLog);

                return true;
            }
            return false;
        } catch (Exception e) {
            log.error("同步实例状态失败: {}", e.getMessage(), e);
            return false;
        }
    }

    /**
     * Looks up the mapping for a platform task within the current tenant.
     *
     * @param platformTaskId   platform-side task id
     * @param platformTaskType platform-side task type (e.g. "ETL")
     * @return the mapping, or {@code null} if none exists
     */
    @Override
    public DsTaskMapping queryTaskMapping(Long platformTaskId, String platformTaskType) {
        LambdaQueryWrapper<DsTaskMapping> wrapper = Wrappers.lambdaQuery();
        wrapper.eq(DsTaskMapping::getPlatformTaskId, platformTaskId);
        wrapper.eq(DsTaskMapping::getPlatformTaskType, platformTaskType);
        // Tenant isolation: always scope to the caller's tenant.
        wrapper.eq(DsTaskMapping::getTenantId, LoginHelper.getTenantId());
        return taskMappingMapper.selectOne(wrapper);
    }

    /**
     * Pages through task mappings, newest first, converted to VOs.
     *
     * <p>NOTE(review): building the result from the VO list rather than the page
     * presumably loses the page's total row count — verify {@code TableDataInfo.build}
     * semantics against the list overload.
     *
     * @param pageQuery pagination parameters
     * @return one page of mapping VOs
     */
    @Override
    public TableDataInfo<DsTaskMappingVO> queryTaskMappingList(PageQuery pageQuery) {
        LambdaQueryWrapper<DsTaskMapping> wrapper = Wrappers.lambdaQuery();
        wrapper.orderByDesc(DsTaskMapping::getCreateTime);

        Page<DsTaskMapping> page = taskMappingMapper.selectPage(pageQuery.build(), wrapper);

        List<DsTaskMappingVO> voList = page.getRecords().stream()
            .map(mapping -> BeanUtil.copyProperties(mapping, DsTaskMappingVO.class))
            .toList();

        return TableDataInfo.build(voList);
    }

    /**
     * Pages through sync logs, newest first, optionally filtered by configuration.
     *
     * @param configId  optional configuration id filter; {@code null} means all
     * @param pageQuery pagination parameters
     * @return one page of sync-log rows
     */
    @Override
    public TableDataInfo<DsSyncLog> querySyncLogs(Long configId, PageQuery pageQuery) {
        LambdaQueryWrapper<DsSyncLog> wrapper = Wrappers.lambdaQuery();
        if (configId != null) {
            wrapper.eq(DsSyncLog::getConfigId, configId);
        }
        wrapper.orderByDesc(DsSyncLog::getCreateTime);

        Page<DsSyncLog> page = syncLogMapper.selectPage(pageQuery.build(), wrapper);
        return TableDataInfo.build(page);
    }

    /**
     * Builds data-lineage records for a task. Currently a stub that only logs.
     *
     * @param taskId platform task id
     * @return always {@code true} (stub)
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Boolean buildDataLineage(Long taskId) {
        // TODO: implement data-lineage construction:
        //  1. read the task's input/output configuration
        //  2. resolve source and target tables
        //  3. persist lineage records into dp_data_lineage
        log.info("构建数据血缘关系: taskId={}", taskId);
        return true;
    }

    /**
     * Scheduled sweep that triggers every enabled AUTO-mode sync configuration.
     * Failures of individual configurations are logged and do not stop the sweep.
     *
     * <p>NOTE(review): {@code triggerSync} is a self-invocation here, so its
     * {@code @Transactional} proxy is bypassed in this call path — confirm whether
     * per-config transactions are required for the scheduled sweep.
     */
    @Override
    @Scheduled(fixedDelayString = "${dataplatform.dolphinscheduler.sync-interval:60000}")
    public void scheduledSync() {
        log.debug("执行定时同步任务");

        // Only configurations that are enabled AND in AUTO mode participate.
        LambdaQueryWrapper<DsSyncConfig> wrapper = Wrappers.lambdaQuery();
        wrapper.eq(DsSyncConfig::getEnabled, 1);
        wrapper.eq(DsSyncConfig::getSyncMode, "AUTO");

        List<DsSyncConfig> configs = syncConfigMapper.selectList(wrapper);

        for (DsSyncConfig config : configs) {
            try {
                triggerSync(config.getId());
            } catch (Exception e) {
                log.error("定时同步失败: configId={}, error={}", config.getId(), e.getMessage());
            }
        }
    }

    /**
     * Creates a workflow definition in DolphinScheduler for the given ETL task.
     *
     * @param etlTask the ETL task to wrap as a one-task SHELL workflow
     * @return the DolphinScheduler process-definition code of the created workflow
     * @throws ServiceException if the remote call fails or returns a non-OK response
     */
    private Long createDolphinSchedulerWorkflow(EtlTask etlTask) {
        try {
            String url = dsProperties.getApiUrl() + "/projects/" + dsProperties.getProjectCode()
                + "/process-definition";

            Map<String, Object> workflowDef = new HashMap<>();
            workflowDef.put("name", "ETL_" + etlTask.getName());
            workflowDef.put("description", "ETL Task: " + etlTask.getName());
            workflowDef.put("tenantCode", "default");
            workflowDef.put("executionType", "PARALLEL");

            // Single SHELL task definition; relation graph is empty for a one-node workflow.
            Map<String, Object> taskDef = new HashMap<>();
            taskDef.put("name", etlTask.getName());
            taskDef.put("taskType", "SHELL");
            taskDef.put("flag", "YES");

            workflowDef.put("taskDefinitionJson", JSONUtil.toJsonStr(List.of(taskDef)));
            workflowDef.put("taskRelationJson", "[]");

            HttpHeaders headers = createHeaders();
            headers.setContentType(MediaType.APPLICATION_JSON);
            HttpEntity<Map<String, Object>> entity = new HttpEntity<>(workflowDef, headers);

            ResponseEntity<Map> response = restTemplate.exchange(url, HttpMethod.POST, entity, Map.class);

            if (response.getStatusCode() == HttpStatus.OK && response.getBody() != null) {
                Map<String, Object> data = (Map<String, Object>) response.getBody().get("data");
                return Long.valueOf(data.get("code").toString());
            }

            throw new ServiceException("创建DolphinScheduler工作流失败");
        } catch (ServiceException e) {
            // Rethrow as-is: previously this fell into the generic handler below and
            // produced a doubled "创建DolphinScheduler工作流失败: ..." message.
            throw e;
        } catch (Exception e) {
            log.error("创建DolphinScheduler工作流失败: {}", e.getMessage(), e);
            throw new ServiceException("创建DolphinScheduler工作流失败: " + e.getMessage());
        }
    }

    /**
     * Synchronizes all workflow definitions. Stub.
     */
    private void syncAllWorkflows() {
        log.info("同步所有工作流");
        // TODO: implement workflow synchronization
    }

    /**
     * Synchronizes all task definitions. Stub.
     */
    private void syncAllTasks() {
        log.info("同步所有任务");
        // TODO: implement task synchronization
    }

    /**
     * Synchronizes all process instances. Stub.
     */
    private void syncAllInstances() {
        log.info("同步所有实例");
        // TODO: implement instance synchronization
    }

    /**
     * Computes elapsed milliseconds between a sync log's start and end time.
     * Both timestamps must already be set on the log entry.
     */
    private static long elapsedMillis(DsSyncLog syncLog) {
        return java.time.Duration.between(syncLog.getStartTime(), syncLog.getEndTime()).toMillis();
    }

    /**
     * Builds the HTTP headers for DolphinScheduler API calls, carrying the
     * configured access token in the "token" header.
     */
    private HttpHeaders createHeaders() {
        HttpHeaders headers = new HttpHeaders();
        headers.set("token", dsProperties.getToken());
        return headers;
    }
}
