package com.example.sparksubmitter.service;

import com.example.sparksubmitter.dto.SparkSqlRequest;
import com.example.sparksubmitter.dto.SparkSqlResponse;
import org.apache.spark.launcher.SparkLauncher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.stereotype.Service;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.*;

@Service
public class SparkService {

    private static final Logger logger = LoggerFactory.getLogger(SparkService.class);
    private final Map<String, SparkSqlResponse> taskStatusMap = new ConcurrentHashMap<>();
    private final Map<String, Process> runningProcesses = new ConcurrentHashMap<>();
    private final ExecutorService executorService = Executors.newFixedThreadPool(10);

    /**
     * 提交 Spark SQL 任务
     */
    public CompletableFuture<SparkSqlResponse> submitSql(SparkSqlRequest request) {
        return CompletableFuture.supplyAsync(() -> {
            String taskId = UUID.randomUUID().toString();
            logger.info("提交 Spark SQL 任务: {}", taskId);

            SparkSqlResponse response = new SparkSqlResponse();
            response.setTaskId(taskId);
            response.setStatus(SparkSqlResponse.STATUS_SUBMITTED);
            response.setSubmitTime(new Date());
            response.setMessage("任务已提交，等待执行");

            taskStatusMap.put(taskId, response);

            try {
                // 准备 Spark 提交命令
                SparkLauncher launcher = buildSparkLauncher(request, taskId);
                
                // 启动 Spark 任务
                Process process = launcher.launch();
                runningProcesses.put(taskId, process);
                
                // 更新状态为运行中
                response.setStatus(SparkSqlResponse.STATUS_RUNNING);
                response.setMessage("任务正在运行中");
                taskStatusMap.put(taskId, response);
                
                // 异步处理任务输出
                handleProcessOutput(process, taskId);
                
                // 等待任务完成
                int exitCode = process.waitFor();
                
                // 移除运行中的进程
                runningProcesses.remove(taskId);
                
                // 更新最终状态
                if (exitCode == 0) {
                    response.setStatus(SparkSqlResponse.STATUS_SUCCEEDED);
                    response.setMessage("任务成功完成");
                    response.setCompletionTime(new Date());
                    response.setExecutionTime(response.getCompletionTime().getTime() - response.getSubmitTime().getTime());
                    response.setResultLocation(String.format("hdfs:///user/spark/output/%s", taskId));
                    response.setSparkUiUrl(String.format("http://spark-master:4040/%s", taskId));
                    response.setYarnAppUrl(String.format("http://yarn-resourcemanager:8088/proxy/%s", taskId));
                } else {
                    response.setStatus(SparkSqlResponse.STATUS_FAILED);
                    response.setMessage("任务执行失败，退出代码: " + exitCode);
                    response.setCompletionTime(new Date());
                }
                
                taskStatusMap.put(taskId, response);
                logger.info("Spark SQL 任务完成: {}, 状态: {}", taskId, response.getStatus());
                
            } catch (Exception e) {
                logger.error("提交 Spark SQL 任务时出错: {}", e.getMessage(), e);
                response.setStatus(SparkSqlResponse.STATUS_FAILED);
                response.setMessage("提交任务时出错: " + e.getMessage());
                response.setCompletionTime(new Date());
                taskStatusMap.put(taskId, response);
            }

            return response;
        }, executorService);
    }

    /**
     * 获取任务状态
     */
    public SparkSqlResponse getTaskStatus(String taskId) {
        return taskStatusMap.getOrDefault(taskId, createTaskNotFoundResponse(taskId));
    }

    /**
     * 取消任务
     */
    public SparkSqlResponse cancelTask(String taskId) {
        SparkSqlResponse response = taskStatusMap.get(taskId);
        
        if (response == null) {
            return createTaskNotFoundResponse(taskId);
        }
        
        if (!response.getStatus().equals(SparkSqlResponse.STATUS_RUNNING)) {
            response.setMessage("任务当前状态为 " + response.getStatus() + "，无法取消");
            return response;
        }
        
        try {
            Process process = runningProcesses.get(taskId);
            if (process != null) {
                process.destroy();
                response.setStatus(SparkSqlResponse.STATUS_CANCELLED);
                response.setMessage("任务已成功取消");
                response.setCompletionTime(new Date());
                taskStatusMap.put(taskId, response);
                runningProcesses.remove(taskId);
            } else {
                response.setMessage("无法找到运行中的进程，任务可能已完成");
            }
        } catch (Exception e) {
            logger.error("取消任务时出错: {}", e.getMessage(), e);
            response.setMessage("取消任务时出错: " + e.getMessage());
        }
        
        return response;
    }

    /**
     * 构建 SparkLauncher
     */
    private SparkLauncher buildSparkLauncher(SparkSqlRequest request, String taskId) {
        SparkLauncher launcher = new SparkLauncher();
        
        // 设置基本配置
        launcher.setSparkHome(request.getSparkHome())
                .setMaster(request.getMasterUrl())
                .setDeployMode(request.getDeployMode())
                .setAppName("SparkSQLTask-" + taskId)
                .setMainClass("org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver");
        
        // 设置资源配置
        launcher.setConf("spark.executor.memory", request.getExecutorMemory())
                .setConf("spark.driver.memory", request.getDriverMemory())
                .setConf("spark.executor.cores", String.valueOf(request.getExecutorCores()))
                .setConf("spark.executor.instances", String.valueOf(request.getNumExecutors()));
        
        // 设置 Hadoop 配置目录
        if (request.getHadoopConfDir() != null && !request.getHadoopConfDir().isEmpty()) {
            System.setProperty("HADOOP_CONF_DIR", request.getHadoopConfDir());
        }
        
        // 添加额外配置
        if (request.getAdditionalConf() != null) {
            for (Map.Entry<String, String> entry : request.getAdditionalConf().entrySet()) {
                launcher.setConf(entry.getKey(), entry.getValue());
            }
        }
        
        // 保存 SQL 到临时文件
        String sqlFilePath = saveSqlToTempFile(request.getSqlText(), taskId);
        launcher.addAppArgs("-f", sqlFilePath);
        
        return launcher;
    }

    /**
     * 保存 SQL 到临时文件
     */
    private String saveSqlToTempFile(String sqlText, String taskId) {
        try {
            java.io.File tempFile = java.io.File.createTempFile("spark-sql-" + taskId, ".sql");
            tempFile.deleteOnExit();
            
            java.io.FileWriter writer = new java.io.FileWriter(tempFile);
            writer.write(sqlText);
            writer.close();
            
            return tempFile.getAbsolutePath();
        } catch (IOException e) {
            logger.error("保存 SQL 到临时文件时出错: {}", e.getMessage(), e);
            throw new RuntimeException("无法保存 SQL 到临时文件", e);
        }
    }

    /**
     * 处理进程输出
     */
    private void handleProcessOutput(Process process, String taskId) {
        // 处理标准输出
        executorService.submit(() -> {
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    logger.info("Task {} stdout: {}", taskId, line);
                    // 可以在这里解析输出，提取进度信息
                }
            } catch (IOException e) {
                logger.error("读取进程标准输出时出错: {}", e.getMessage(), e);
            }
        });
        
        // 处理错误输出
        executorService.submit(() -> {
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    logger.error("Task {} stderr: {}", taskId, line);
                    // 可以在这里解析错误信息，更新任务状态
                }
            } catch (IOException e) {
                logger.error("读取进程错误输出时出错: {}", e.getMessage(), e);
            }
        });
    }

    /**
     * 创建任务未找到的响应
     */
    private SparkSqlResponse createTaskNotFoundResponse(String taskId) {
        SparkSqlResponse response = new SparkSqlResponse();
        response.setTaskId(taskId);
        response.setStatus("NOT_FOUND");
        response.setMessage("未找到任务 ID 对应的任务");
        return response;
    }
}    