package com.example.sparkmanager.service.impl;

import com.example.sparkmanager.config.SparkConfig;
import com.example.sparkmanager.model.JobStatus;
import com.example.sparkmanager.model.SparkJob;
import com.example.sparkmanager.repository.SparkJobRepository;
import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.LocalDateTime;

@Service
@Slf4j
@RequiredArgsConstructor
public class AsyncSparkExecutionService {

    /**
     * SSH connect timeout in milliseconds. The original code passed {@code 50 * 1000}
     * while its comment documented a 5-second timeout; the comment's intent is kept.
     */
    private static final int SSH_CONNECT_TIMEOUT_MS = 5 * 1000;

    /** Poll interval while waiting for the short remote "echo $!" command to finish. */
    private static final long CHANNEL_POLL_INTERVAL_MS = 100L;

    private final SparkJobRepository jobRepository;
    private final SparkConfig sparkConfig;

    /**
     * Asynchronously submits the Spark job identified by {@code jobId} to the remote host
     * over SSH and persists the background process ID returned by the remote shell.
     * On any failure the job is marked {@link JobStatus#FAILED} with the error message.
     *
     * @param jobId primary key of the {@link SparkJob} to execute
     */
    @Async
    public void executeSparkJobAsync(Long jobId) {
        log.info("Starting async spark job execution for job ID: {}", jobId);

        try {
            SparkJob job = jobRepository.findById(jobId)
                    .orElseThrow(() -> new IllegalStateException("Job not found with id: " + jobId));

            // Launch spark-submit remotely; the returned value is the shell's "$!" (background PID).
            String processId = submitSparkJob(job);
            job.setProcessId(processId);
            jobRepository.save(job);

            log.info("Spark job submitted successfully, job ID: {}, process ID: {}", jobId, processId);

        } catch (Exception e) {
            log.error("Async spark job execution failed for job ID: {}", jobId, e);
            updateJobStatus(jobId, JobStatus.FAILED, e.getMessage());
        }
    }

    /**
     * Opens an SSH session to the configured host, starts spark-submit in the background
     * via {@code nohup}, and returns the PID printed by {@code echo $!}.
     *
     * @param job the job whose main class, jar path and arguments form the spark-submit command
     * @return the remote process ID as a decimal string
     * @throws JSchException if the SSH session or exec channel cannot be established
     * @throws IOException   declared for callers; kept for interface compatibility
     */
    private String submitSparkJob(SparkJob job) throws JSchException, IOException {
        JSch jsch = new JSch();
        Session session = jsch.getSession(
                sparkConfig.getUsername(),
                sparkConfig.getHost(),
                sparkConfig.getPort());

        session.setPassword(sparkConfig.getPassword());
        // NOTE(review): disabling host-key checking allows man-in-the-middle attacks;
        // consider shipping a known_hosts file instead.
        session.setConfig("StrictHostKeyChecking", "no");
        session.connect(SSH_CONNECT_TIMEOUT_MS);

        ChannelExec channel = null;
        try {
            // NOTE(review): job fields are interpolated directly into a remote shell command.
            // If jobName/mainClass/jarPath/arguments can carry user input this is a
            // shell-injection risk — validate or quote them upstream.
            String sparkCommand = String.format("%s/bin/spark-submit --class %s %s %s",
                    sparkConfig.getSparkHome(),
                    job.getMainClass(),
                    job.getJarPath(),
                    job.getArguments() != null ? job.getArguments() : "");

            // Run detached with nohup, redirect stdout/stderr to per-job files, then echo the PID.
            // TODO(review): output files land in the jars directory; a dedicated log dir
            // (the commented-out getLogDir() in the original) seems intended — confirm.
            String logBaseName = job.getJobName() + "_" + job.getId();
            String command = String.format(
                    "nohup %s > %s/%s.out 2> %s/%s.err & echo $!",
                    sparkCommand,
                    sparkConfig.getJarsDir(),
                    logBaseName,
                    sparkConfig.getJarsDir(),
                    logBaseName);

            log.info("执行Spark命令: {}", command);

            channel = (ChannelExec) session.openChannel("exec");
            channel.setCommand(command);

            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            channel.setOutputStream(outputStream);
            channel.connect();

            // Poll until the (very short) remote command completes and the PID has been echoed.
            while (!channel.isClosed()) {
                try {
                    Thread.sleep(CHANNEL_POLL_INTERVAL_MS);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // preserve interrupt status for callers
                    break;
                }
            }

            // Decode explicitly as UTF-8; the no-arg toString() used the platform charset.
            String output = new String(outputStream.toByteArray(), StandardCharsets.UTF_8).trim();
            if (output.isEmpty()) {
                throw new IllegalStateException("Failed to get process ID");
            }

            // "echo $!" prints a single decimal PID on the last line; validate it so a
            // shell banner or error text is never stored as a process ID.
            String[] lines = output.split("\\R");
            String pid = lines[lines.length - 1].trim();
            if (!pid.matches("\\d+")) {
                throw new IllegalStateException(
                        "Unexpected spark-submit output, no PID found: " + output);
            }
            return pid;
        } finally {
            // Always release SSH resources — the original leaked the session/channel
            // whenever an exception occurred before the disconnect calls.
            if (channel != null) {
                channel.disconnect();
            }
            session.disconnect();
        }
    }

    /**
     * Persists the given status (and optional error message) on the job and stamps the
     * end time. Failures here are logged rather than rethrown so that status bookkeeping
     * never masks the original execution error.
     *
     * @param jobId        primary key of the job to update
     * @param status       terminal status to record
     * @param errorMessage error detail, or {@code null} to leave the field untouched
     */
    private void updateJobStatus(Long jobId, JobStatus status, String errorMessage) {
        try {
            SparkJob job = jobRepository.findById(jobId)
                    .orElseThrow(() -> new IllegalStateException("Job not found"));

            job.setStatus(status);
            if (errorMessage != null) {
                job.setErrorMessage(errorMessage);
            }
            job.setEndTime(LocalDateTime.now());
            jobRepository.save(job);

        } catch (Exception e) {
            log.error("Failed to update job status for job ID: {}", jobId, e);
        }
    }
}
