package com.wande.dataplatform.etl.executor;

import cn.hutool.json.JSONUtil;
import com.wande.dataplatform.domain.EtlInstance;
import com.wande.dataplatform.domain.EtlTask;
import com.wande.dataplatform.etl.node.*;
import com.wande.dataplatform.mapper.EtlInstanceMapper;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.time.Duration;
import java.time.LocalDateTime;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * ETL执行器实现
 *
 * @author dataplatform
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class EtlExecutorImpl implements IEtlExecutor {

    private final EtlInstanceMapper etlInstanceMapper;
    private final InputNode inputNode;
    private final TransformNode transformNode;
    private final OutputNode outputNode;

    /**
     * In-memory execution progress keyed by instance id.
     *
     * <p>NOTE(review): entries are never evicted, so this map grows without
     * bound over the service lifetime. Consider removing finished entries
     * after a TTL once callers no longer need {@link #getProgress(Long)}.
     */
    private final Map<Long, EtlProgress> progressMap = new ConcurrentHashMap<>();

    /**
     * Creates a RUNNING instance record for the task, launches the pipeline,
     * and returns the new instance id immediately.
     *
     * <p>NOTE(review): this is a self-invocation of an {@code @Async} method.
     * Spring's async proxy is bypassed for calls within the same bean, so
     * {@code executeAsync} actually runs synchronously on the caller thread.
     * Move it to a separate bean (or invoke it through the injected proxy)
     * for true asynchronous execution.
     *
     * @param task the ETL task definition to run
     * @return the id of the newly created execution instance
     */
    @Override
    public Long execute(EtlTask task) {
        EtlInstance instance = new EtlInstance();
        instance.setTaskId(task.getId());
        instance.setStatus("RUNNING");
        instance.setStartTime(LocalDateTime.now());
        etlInstanceMapper.insert(instance);

        executeAsync(task, instance.getId());

        return instance.getId();
    }

    /**
     * Runs the input → transform → output pipeline for one instance,
     * tracking progress in {@link #progressMap} and persisting the final
     * SUCCESS/FAILED state with row statistics.
     *
     * @param task       the ETL task definition (source/transform/target JSON configs)
     * @param instanceId id of the instance row created by {@link #execute(EtlTask)}
     */
    @Async
    public void executeAsync(EtlTask task, Long instanceId) {
        EtlProgress progress = new EtlProgress();
        progress.setInstanceId(instanceId);
        progress.setStatus("RUNNING");
        progress.setPercentage(0);
        progressMap.put(instanceId, progress);

        try {
            // Shared context carries config, the in-flight data set and statistics
            // across the three pipeline nodes.
            EtlContext context = new EtlContext();
            context.setTaskId(task.getId());
            context.setInstanceId(instanceId);

            // 1. Input node: read source data.
            progress.setCurrentStep("读取源数据");
            progress.setPercentage(10);
            context.setConfig(parseConfig(task.getSourceConfig()));
            EtlNodeResult inputResult = checkResult(inputNode.execute(context));
            context.setDataSet(inputResult.getData());
            progress.setTotalRows((long) inputResult.getData().size());
            progress.setPercentage(30);

            // 2. Transform node.
            progress.setCurrentStep("转换数据");
            context.setConfig(parseConfig(task.getTransformConfig()));
            EtlNodeResult transformResult = checkResult(transformNode.execute(context));
            context.setDataSet(transformResult.getData());
            progress.setPercentage(60);

            // 3. Output node: write to the target.
            progress.setCurrentStep("写入目标数据");
            context.setConfig(parseConfig(task.getTargetConfig()));
            checkResult(outputNode.execute(context));
            progress.setPercentage(100);
            progress.setProcessedRows(context.getStatistics().getTargetRows());

            // Persist the final SUCCESS state with row statistics.
            EtlInstance instance = new EtlInstance();
            instance.setId(instanceId);
            instance.setStatus("SUCCESS");
            instance.setSourceRows(context.getStatistics().getSourceRows());
            instance.setTargetRows(context.getStatistics().getTargetRows());
            instance.setSuccessRows(context.getStatistics().getSuccessRows());
            instance.setFailedRows(context.getStatistics().getFailedRows());
            finalizeInstance(instance);

            progress.setStatus("SUCCESS");
            log.info("ETL任务执行成功: taskId={}, instanceId={}", task.getId(), instanceId);

        } catch (Exception e) {
            log.error("ETL任务执行失败: taskId={}, instanceId={}", task.getId(), instanceId, e);

            // Persist the final FAILED state; keep only the message for the UI,
            // the full stack trace is already in the log above.
            EtlInstance instance = new EtlInstance();
            instance.setId(instanceId);
            instance.setStatus("FAILED");
            instance.setErrorMessage(e.getMessage());
            finalizeInstance(instance);

            progress.setStatus("FAILED");
            progress.setErrorMessage(e.getMessage());
        }
    }

    /**
     * Parses a JSON config string into a generic map. The single unchecked
     * conversion from Hutool's raw {@code Map.class} is confined here.
     */
    @SuppressWarnings("unchecked")
    private static Map<String, Object> parseConfig(String json) {
        return JSONUtil.toBean(json, Map.class);
    }

    /**
     * Returns the node result if it succeeded; otherwise aborts the pipeline
     * with the node's error message.
     *
     * @throws RuntimeException when the node reported failure
     */
    private static EtlNodeResult checkResult(EtlNodeResult result) {
        if (!result.isSuccess()) {
            throw new RuntimeException(result.getErrorMessage());
        }
        return result;
    }

    /**
     * Stamps the end time on the given (partially populated) instance update,
     * computes the elapsed duration against the persisted start time, and
     * writes the update.
     *
     * <p>Null-safe: the original code dereferenced
     * {@code selectById(id).getStartTime()} directly and would NPE if the
     * instance row was missing or had no start time.
     */
    private void finalizeInstance(EtlInstance instance) {
        instance.setEndTime(LocalDateTime.now());
        EtlInstance persisted = etlInstanceMapper.selectById(instance.getId());
        if (persisted != null && persisted.getStartTime() != null) {
            instance.setDuration(
                    Duration.between(persisted.getStartTime(), instance.getEndTime()).toMillis());
        }
        etlInstanceMapper.updateById(instance);
    }

    /**
     * Marks the instance STOPPED and records its end time and duration.
     *
     * <p>NOTE(review): this only updates status records — it does not
     * interrupt a pipeline that is already running in {@code executeAsync}.
     */
    @Override
    public void stop(Long instanceId) {
        EtlInstance instance = new EtlInstance();
        instance.setId(instanceId);
        instance.setStatus("STOPPED");
        finalizeInstance(instance);

        EtlProgress progress = progressMap.get(instanceId);
        if (progress != null) {
            progress.setStatus("STOPPED");
        }
    }

    /**
     * Returns the live progress for an instance, or an empty
     * {@link EtlProgress} when the instance is unknown (never {@code null}).
     */
    @Override
    public EtlProgress getProgress(Long instanceId) {
        return progressMap.getOrDefault(instanceId, new EtlProgress());
    }
}
