package com.example.sparkmanager.service.impl;

import com.example.sparkmanager.config.SparkConfig;
import com.example.sparkmanager.dto.JobRequest;
import com.example.sparkmanager.dto.JobResponse;
import com.example.sparkmanager.dto.ScheduleRequest;
import com.example.sparkmanager.model.JobStatus;
import com.example.sparkmanager.model.SparkJob;
import com.example.sparkmanager.repository.SparkJobRepository;
import com.example.sparkmanager.service.SparkJobService;
import com.example.sparkmanager.task.ScheduledJobTask;
import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.modelmapper.ModelMapper;
import org.quartz.*;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.time.LocalDateTime;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Default {@link SparkJobService} implementation.
 *
 * <p>Persists job definitions, schedules them with Quartz, and runs/monitors
 * them on a remote Spark host over SSH (JSch). Dependencies are injected via
 * the Lombok-generated constructor ({@code @RequiredArgsConstructor}).
 */
@Service
@RequiredArgsConstructor
@Slf4j
public class SparkJobServiceImpl implements SparkJobService {
    // Persistence for SparkJob entities.
    private final SparkJobRepository jobRepository;
    // Maps between entities and request/response DTOs.
    private final ModelMapper modelMapper;
    // Remote Spark host settings: SSH credentials, jars directory, spark home.
    private final SparkConfig sparkConfig;
    // Quartz scheduler factory backing cron-based scheduling.
    private final SchedulerFactoryBean schedulerFactoryBean;
    // Runs spark-submit asynchronously so triggerJob can return immediately.
    private final AsyncSparkExecutionService asyncExecutionService;
    /**
     * Persists a new Spark job and, when a cron expression is supplied,
     * registers it with the Quartz scheduler.
     *
     * @param jobRequest incoming job definition; its jarPath is resolved
     *                   against the configured jars directory before mapping
     * @return the persisted job mapped to a response DTO
     */
    @Override
    @Transactional
    public JobResponse createJob(JobRequest jobRequest) {
        // Resolve the jar location against the configured jars directory
        // before the request is mapped onto the entity.
        jobRequest.setJarPath(sparkConfig.getJarsDir() + jobRequest.getJarPath());

        SparkJob entity = modelMapper.map(jobRequest, SparkJob.class);
        entity.setStatus(JobStatus.SCHEDULED);
        SparkJob saved = jobRepository.save(entity);

        // Only register with Quartz when a non-empty cron expression was given.
        String cron = jobRequest.getScheduleExpression();
        if (cron != null && !cron.isEmpty()) {
            scheduleJob(saved);
        }

        return modelMapper.map(saved, JobResponse.class);
    }

    /**
     * Looks up a single job by its database id.
     *
     * @param id primary key of the job
     * @return the job mapped to a response DTO
     * @throws RuntimeException if no job exists with the given id
     */
    @Override
    public JobResponse getJobById(Long id) {
        SparkJob found = jobRepository.findById(id)
                .orElseThrow(() -> new RuntimeException("Job not found with id: " + id));
        return modelMapper.map(found, JobResponse.class);
    }

    /**
     * Lists every persisted job.
     *
     * @return all jobs mapped to response DTOs (empty list when none exist)
     */
    @Override
    public List<JobResponse> getAllJobs() {
        return jobRepository.findAll()
                .stream()
                .map(entity -> modelMapper.map(entity, JobResponse.class))
                .collect(Collectors.toList());
    }

    /**
     * Replaces a job's cron expression and re-registers it with Quartz.
     *
     * @param id              primary key of the job to reschedule
     * @param scheduleRequest carries the new cron expression (null/empty
     *                        leaves the job unscheduled)
     * @return the updated job mapped to a response DTO
     * @throws RuntimeException if no job exists with the given id
     */
    @Override
    @Transactional
    public JobResponse updateJobSchedule(Long id, ScheduleRequest scheduleRequest) {
        SparkJob job = jobRepository.findById(id)
                .orElseThrow(() -> new RuntimeException("Job not found with id: " + id));

        // Drop any currently registered trigger before applying the new one.
        unscheduleJob(job);

        String newExpression = scheduleRequest.getScheduleExpression();
        job.setScheduleExpression(newExpression);
        SparkJob saved = jobRepository.save(job);

        // A null/empty expression simply leaves the job unscheduled.
        if (newExpression != null && !newExpression.isEmpty()) {
            scheduleJob(saved);
        }

        return modelMapper.map(saved, JobResponse.class);
    }

    /**
     * Deletes a job, removing its Quartz trigger first so nothing fires for
     * the deleted row.
     *
     * @param id primary key of the job to delete
     * @throws RuntimeException if no job exists with the given id
     */
    @Override
    @Transactional
    public void deleteJob(Long id) {
        SparkJob target = jobRepository.findById(id)
                .orElseThrow(() -> new RuntimeException("Job not found with id: " + id));
        unscheduleJob(target);
        jobRepository.delete(target);
    }

   /* @Override
    @Transactional
    public JobResponse triggerJob(Long id) {
        SparkJob job = jobRepository.findById(id)
                .orElseThrow(() -> new RuntimeException("Job not found with id: " + id));

        try {
            executeSparkJob(job);
            job.setStatus(JobStatus.RUNNING);
            job = jobRepository.save(job);
            return modelMapper.map(job, JobResponse.class);
        } catch (Exception e) {
            job.setStatus(JobStatus.FAILED);
            job.setErrorLog(e.getMessage());
            job = jobRepository.save(job);
            throw new RuntimeException("Failed to trigger job: " + e.getMessage(), e);
        }
    }*/

    @Override
    public JobResponse triggerJob(Long id) {
        SparkJob job = jobRepository.findById(id)
                .orElseThrow(() -> new RuntimeException("Job not found with id: " + id));

       /* if (job.getStatus() == JobStatus.RUNNING) {
            throw new RuntimeException("Job is already running");
        }*/

        // 立即更新状态为运行中
        job.setStatus(JobStatus.RUNNING);
        job.setStartTime(LocalDateTime.now());
        job.setErrorMessage(null);
        SparkJob savedJob = jobRepository.save(job);

        // 使用异步服务执行
        asyncExecutionService.executeSparkJobAsync(savedJob.getId());

        return modelMapper.map(job, JobResponse.class);
    }

    /**
     * Reports a job's current status, refreshing it from the remote host when
     * a process id was recorded for it.
     *
     * <p>If the remote process has exited while the stored status is still
     * RUNNING, the log files are inspected and the final status persisted via
     * {@link #checkJobResult(SparkJob)}. Any probe failure is logged and the
     * last known status is returned.
     *
     * @param jobId primary key of the job
     * @return the (possibly refreshed) job status
     * @throws RuntimeException if no job exists with the given id
     */
    @Override
    public JobStatus checkJobStatus(Long jobId) {
        SparkJob job = jobRepository.findById(jobId)
                .orElseThrow(() -> new RuntimeException("Job not found"));

        String pid = job.getProcessId();
        if (pid == null) {
            // Nothing to probe remotely; report the stored status.
            return job.getStatus();
        }

        try {
            boolean alive = isProcessRunning(pid);
            if (!alive && job.getStatus() == JobStatus.RUNNING) {
                // Process ended: inspect its logs and persist the final status.
                checkJobResult(job);
            }
            return job.getStatus();
        } catch (Exception e) {
            // Best-effort probe: fall back to the last known status.
            log.error("Error checking job status: {}", jobId, e);
            return job.getStatus();
        }
    }

    /**
     * Checks over SSH whether the given PID is still alive on the Spark host.
     *
     * @param processId remote PID previously recorded for the job
     * @return {@code true} if {@code ps -p <pid>} reports the process running
     * @throws JSchException if the SSH session/channel cannot be established
     * @throws IOException   declared for signature compatibility with callers
     */
    private boolean isProcessRunning(String processId) throws JSchException, IOException {
        JSch jsch = new JSch();
        Session session = jsch.getSession(
                sparkConfig.getUsername(),
                sparkConfig.getHost(),
                sparkConfig.getPort());

        session.setPassword(sparkConfig.getPassword());
        session.setConfig("StrictHostKeyChecking", "no");
        // NOTE(review): 50s connect timeout here vs 5s in checkJobResult —
        // confirm which value is intended.
        session.connect(50 * 1000);

        ChannelExec channel = null;
        try {
            String command = "ps -p " + processId + " > /dev/null 2>&1 && echo 'RUNNING' || echo 'STOPPED'";
            log.info("检查进程命令：{}", command);

            channel = (ChannelExec) session.openChannel("exec");
            channel.setCommand(command);

            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            channel.setOutputStream(outputStream);
            channel.connect();

            // Poll until the remote command completes; stop promptly on interrupt.
            while (!channel.isClosed()) {
                try {
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                }
            }

            return "RUNNING".equals(outputStream.toString().trim());
        } finally {
            // Fix: previously the session/channel leaked whenever openChannel()
            // or connect() threw — always release SSH resources.
            if (channel != null) {
                channel.disconnect();
            }
            session.disconnect();
        }
    }

    /**
     * Inspects the job's remote log files after its process has exited and
     * persists the final status (COMPLETED/FAILED) plus captured output.
     *
     * <p>Success is decided by the remote {@code .err} file: non-empty means
     * failure. NOTE(review): spark-submit commonly writes INFO logging to
     * stderr, so a non-empty .err file does not necessarily mean failure —
     * confirm this heuristic against actual job output.
     *
     * @param job job whose process has been observed as no longer running
     * @throws JSchException if the SSH session/channel cannot be established
     * @throws IOException   declared for signature compatibility with callers
     */
    private void checkJobResult(SparkJob job) throws JSchException, IOException {
        // Log files are expected next to the jars; NOTE(review): earlier
        // revisions used sparkConfig.getLogDir() — confirm jarsDir is intended.
        String logFile = sparkConfig.getJarsDir() + "/" + job.getJobName() + "_" + job.getId() + ".out";
        String errorFile = sparkConfig.getJarsDir() + "/" + job.getJobName() + "_" + job.getId() + ".err";

        JSch jsch = new JSch();
        Session session = jsch.getSession(
                sparkConfig.getUsername(),
                sparkConfig.getHost(),
                sparkConfig.getPort());

        session.setPassword(sparkConfig.getPassword());
        session.setConfig("StrictHostKeyChecking", "no");
        session.connect(5000);

        try {
            // Non-empty error file (-s) signals failure.
            String checkErrorCommand = String.format(
                    "if [ -s %s ]; then echo 'ERROR'; else echo 'SUCCESS'; fi",
                    errorFile
            );

            ChannelExec channel = (ChannelExec) session.openChannel("exec");
            channel.setCommand(checkErrorCommand);

            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            channel.setOutputStream(outputStream);
            channel.connect();

            while (!channel.isClosed()) {
                try {
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                }
            }

            String result = outputStream.toString().trim();
            channel.disconnect();

            // Capture both streams for the job record.
            String logContent = readRemoteFile(session, logFile);
            String errorContent = readRemoteFile(session, errorFile);

            job.setOutputLog(logContent);
            job.setErrorLog(errorContent);

            if ("ERROR".equals(result) || !errorContent.isEmpty()) {
                job.setStatus(JobStatus.FAILED);
            } else {
                job.setStatus(JobStatus.COMPLETED);
            }

            job.setEndTime(LocalDateTime.now());
            jobRepository.save(job);
        } finally {
            // Fix: previously the session leaked if any step above threw.
            session.disconnect();
        }
    }

    /**
     * Reads a remote file's content over the given (already connected) SSH
     * session. Best-effort: a missing/unreadable file yields an empty string.
     *
     * @param session  connected JSch session (not closed by this method)
     * @param filePath absolute path of the remote file
     * @return file content, or {@code ""} on any failure
     * @throws JSchException declared for signature compatibility (failures are
     *                       caught and mapped to an empty result)
     * @throws IOException   declared for signature compatibility
     */
    private String readRemoteFile(Session session, String filePath) throws JSchException, IOException {
        ChannelExec channel = null;
        try {
            channel = (ChannelExec) session.openChannel("exec");
            // Fall back to empty output when the file does not exist.
            channel.setCommand("cat " + filePath + " 2>/dev/null || echo ''");

            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            channel.setOutputStream(outputStream);
            channel.connect();

            while (!channel.isClosed()) {
                try {
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                }
            }

            return outputStream.toString();
        } catch (Exception e) {
            // Fix: include the exception so the failure cause is not lost.
            log.warn("Failed to read remote file: {}", filePath, e);
            return "";
        } finally {
            // Fix: previously the channel leaked when an exception occurred
            // between connect() and disconnect().
            if (channel != null) {
                channel.disconnect();
            }
        }
    }

    /**
     * Cancels a job's remote YARN application (when one was recorded) and
     * marks the job CANCELLED.
     *
     * <p>Jobs without a remote application id are returned unchanged.
     *
     * @param id primary key of the job to cancel
     * @return the (possibly updated) job mapped to a response DTO
     * @throws RuntimeException if the job is missing or remote cancellation fails
     */
    @Override
    @Transactional
    public JobResponse cancelJob(Long id) {
        SparkJob job = jobRepository.findById(id)
                .orElseThrow(() -> new RuntimeException("Job not found with id: " + id));

        String remoteId = job.getRemoteJobId();
        if (remoteId != null && !remoteId.isEmpty()) {
            try {
                cancelRemoteJob(job);
                job.setStatus(JobStatus.CANCELLED);
                job = jobRepository.save(job);
            } catch (Exception e) {
                throw new RuntimeException("Failed to cancel job: " + e.getMessage(), e);
            }
        }

        return modelMapper.map(job, JobResponse.class);
    }

    /**
     * Lists jobs in the given status.
     *
     * @param status status name, matched case-insensitively ("running" == "RUNNING")
     * @return matching jobs mapped to response DTOs
     * @throws IllegalArgumentException if the name matches no {@link JobStatus}
     */
    @Override
    public List<JobResponse> getJobsByStatus(String status) {
        JobStatus parsed = JobStatus.valueOf(status.toUpperCase());
        return jobRepository.findByStatus(parsed)
                .stream()
                .map(entity -> modelMapper.map(entity, JobResponse.class))
                .collect(Collectors.toList());
    }

    /**
     * Registers a cron trigger with Quartz for the given job.
     *
     * <p>Identities follow the "job_&lt;id&gt;" / "trigger_&lt;id&gt;"
     * convention inside the "spark_jobs" group; the job id is carried to
     * {@link ScheduledJobTask} via the job data map.
     *
     * @param job persisted job whose scheduleExpression is a valid cron string
     * @throws RuntimeException wrapping any {@link SchedulerException}
     */
    private void scheduleJob(SparkJob job) {
        try {
            String jobName = "job_" + job.getId();
            String triggerName = "trigger_" + job.getId();

            JobDetail detail = JobBuilder.newJob(ScheduledJobTask.class)
                    .withIdentity(jobName, "spark_jobs")
                    .usingJobData("jobId", job.getId())
                    .build();

            CronScheduleBuilder cron = CronScheduleBuilder.cronSchedule(job.getScheduleExpression());
            Trigger trigger = TriggerBuilder.newTrigger()
                    .withIdentity(triggerName, "spark_jobs")
                    .withSchedule(cron)
                    .build();

            schedulerFactoryBean.getScheduler().scheduleJob(detail, trigger);
        } catch (SchedulerException e) {
            throw new RuntimeException("Failed to schedule job: " + e.getMessage(), e);
        }
    }

    /**
     * Removes the job's Quartz trigger ("trigger_&lt;id&gt;" in group
     * "spark_jobs"), if one is registered.
     *
     * @param job job whose trigger should be removed
     * @throws RuntimeException wrapping any {@link SchedulerException}
     */
    private void unscheduleJob(SparkJob job) {
        try {
            TriggerKey key = TriggerKey.triggerKey("trigger_" + job.getId(), "spark_jobs");
            schedulerFactoryBean.getScheduler().unscheduleJob(key);
        } catch (SchedulerException e) {
            throw new RuntimeException("Failed to unschedule job: " + e.getMessage(), e);
        }
    }

    /**
     * Runs spark-submit for the job over SSH, blocking until the remote
     * command finishes, then records stdout/stderr and the extracted remote
     * application id on the entity (caller is responsible for persisting).
     *
     * @param job job carrying mainClass, jarPath and optional arguments
     * @throws JSchException if the SSH session/channel cannot be established
     * @throws IOException   declared for signature compatibility with callers
     */
    private void executeSparkJob(SparkJob job) throws JSchException, IOException {
        JSch jsch = new JSch();
        Session session = jsch.getSession(
                sparkConfig.getUsername(),
                sparkConfig.getHost(),
                sparkConfig.getPort());

        session.setPassword(sparkConfig.getPassword());
        session.setConfig("StrictHostKeyChecking", "no");
        session.connect();

        ChannelExec channel = null;
        try {
            String command = String.format("%s/bin/spark-submit --class %s %s %s",
                    sparkConfig.getSparkHome(),
                    job.getMainClass(),
                    job.getJarPath(),
                    job.getArguments() != null ? job.getArguments() : "");
            log.info("执行命令：{}", command);

            channel = (ChannelExec) session.openChannel("exec");
            channel.setCommand(command);

            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ByteArrayOutputStream errorStream = new ByteArrayOutputStream();
            channel.setOutputStream(outputStream);
            channel.setErrStream(errorStream);
            channel.connect();

            // Poll until the remote command completes.
            while (!channel.isClosed()) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    // Fix: previously the loop kept spinning after an interrupt
                    // (sleep rethrows immediately once the flag is set).
                    break;
                }
            }

            String output = outputStream.toString();
            job.setOutputLog(output);
            job.setErrorLog(errorStream.toString());
            job.setRemoteJobId(extractJobId(output));
        } finally {
            // Fix: previously the session/channel leaked on any exception.
            if (channel != null) {
                channel.disconnect();
            }
            session.disconnect();
        }
    }

    /**
     * Kills the job's YARN application over SSH ({@code yarn application
     * -kill}) and appends the cancellation output to the job's logs (caller is
     * responsible for persisting the entity).
     *
     * @param job job with a non-empty remoteJobId (checked by the caller)
     * @throws JSchException if the SSH session/channel cannot be established
     * @throws IOException   declared for signature compatibility with callers
     */
    private void cancelRemoteJob(SparkJob job) throws JSchException, IOException {
        JSch jsch = new JSch();
        Session session = jsch.getSession(
                sparkConfig.getUsername(),
                sparkConfig.getHost(),
                sparkConfig.getPort());

        session.setPassword(sparkConfig.getPassword());
        session.setConfig("StrictHostKeyChecking", "no");
        session.connect();

        ChannelExec channel = null;
        try {
            String command = String.format("yarn application -kill %s", job.getRemoteJobId());

            channel = (ChannelExec) session.openChannel("exec");
            channel.setCommand(command);

            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            ByteArrayOutputStream errorStream = new ByteArrayOutputStream();
            channel.setOutputStream(outputStream);
            channel.setErrStream(errorStream);
            channel.connect();

            // Poll until the remote command completes.
            while (!channel.isClosed()) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    // Fix: previously the loop kept spinning after an interrupt
                    // (sleep rethrows immediately once the flag is set).
                    break;
                }
            }

            String output = outputStream.toString();
            String error = errorStream.toString();

            // Fix: null logs previously produced a literal "null" prefix via
            // string concatenation — treat null as empty instead.
            String previousOut = job.getOutputLog() != null ? job.getOutputLog() : "";
            String previousErr = job.getErrorLog() != null ? job.getErrorLog() : "";
            job.setOutputLog(previousOut + "\nCancellation Output:\n" + output);
            job.setErrorLog(previousErr + "\nCancellation Error:\n" + error);
        } finally {
            // Fix: previously the session/channel leaked on any exception.
            if (channel != null) {
                channel.disconnect();
            }
            session.disconnect();
        }
    }

    /**
     * Extracts the remote application id from spark-submit output, e.g. the
     * line "... Submitted application application_1234567890_0001".
     *
     * <p>Fix: the original checked {@code contains("Submitted application")}
     * (no trailing space) but split on the marker WITH a trailing space, so
     * output ending exactly at the marker threw
     * {@link ArrayIndexOutOfBoundsException}. This version returns the text
     * between the marker and the next newline, or {@code null} when absent.
     *
     * @param output full stdout captured from spark-submit
     * @return the application id, or {@code null} if it cannot be found
     */
    private String extractJobId(String output) {
        final String marker = "Submitted application ";
        int markerIndex = output.indexOf(marker);
        if (markerIndex < 0) {
            return null;
        }
        int start = markerIndex + marker.length();
        int lineEnd = output.indexOf('\n', start);
        return lineEnd >= 0 ? output.substring(start, lineEnd) : output.substring(start);
    }
}