package cn.org.intelli.zjgflink.service;

import cn.org.intelli.zjgflink.entity.FlinkJobEntity;
import cn.org.intelli.zjgflink.repository.FlinkJobRepository;
import cn.org.intelli.zjgflink.util.FlinkJobDeploy;
import cn.org.intelli.zjgflink.util.FlinkJobStop;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.google.common.util.concurrent.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.*;
import java.util.concurrent.*;

@Service
public class FlinkJobService {
    private static final Logger log = LoggerFactory.getLogger(FlinkJobService.class);

    /** YARN states in which the application report carries a usable tracking URL. */
    private static final EnumSet<YarnApplicationState> TRACKABLE_STATES = EnumSet.of(
            YarnApplicationState.RUNNING, YarnApplicationState.FINISHED,
            YarnApplicationState.FAILED, YarnApplicationState.KILLED);

    /** YARN states mapped to the local "DEPLOYING" job status. */
    private static final Set<String> DEPLOYING_STATES = Collections.unmodifiableSet(
            new HashSet<>(Arrays.asList("NEW", "NEW_SAVING", "SUBMITTED", "ACCEPTED")));

    /** YARN states mapped to the local "STOPPED" job status. */
    private static final Set<String> STOPPED_STATES = Collections.unmodifiableSet(
            new HashSet<>(Arrays.asList("FINISHED", "KILLED")));

    // SynchronousQueue + 0 core threads: each submitted task gets a fresh thread
    // immediately, up to 30 concurrent deploy/stop operations.
    private final ExecutorService executorService =
            new ThreadPoolExecutor(0, 30, 60L, TimeUnit.SECONDS, new SynchronousQueue<>());
    private final ListeningExecutorService listeningExecutorService =
            MoreExecutors.listeningDecorator(this.executorService);

    @Autowired
    private FlinkJobRepository flinkJobRepository;
    @Autowired
    private FlinkJobDeploy flinkJobDeploy;
    @Autowired
    private FlinkJobStop flinkJobStop;

    /**
     * Enriches the given jobs with their live status on YARN and persists the
     * refreshed statuses.
     *
     * <p>Jobs whose YARN application id is unknown to the cluster (or empty) fall
     * back to status {@code "STOPPED"}; jobs with no application id at all keep
     * their stored status unchanged.
     *
     * @param flinkJobs jobs to refresh; the list's entities are mutated and saved
     * @return one JSON object per input job, merged with the YARN-side info
     * @throws Exception if the YARN cluster cannot be queried at all
     */
    public JSONArray transFlinkJobs(List<FlinkJobEntity> flinkJobs) throws Exception {
        List<String> yarnApplicationIds = new ArrayList<>();
        flinkJobs.forEach(flinkJob -> {
            if (StringUtils.isNotEmpty(flinkJob.getYarnApplicationId()))
                yarnApplicationIds.add(flinkJob.getYarnApplicationId());
        });
        // Nothing ever submitted to YARN: return the stored view untouched.
        if (yarnApplicationIds.isEmpty())
            return JSON.parseArray(JSON.toJSONString(flinkJobs));
        Map<String, JSONObject> infoCache = getFlinkJobInfoOnYarn(yarnApplicationIds);
        JSONArray result = new JSONArray();
        flinkJobs.forEach(flinkJob -> {
            JSONObject atom = JSONObject.parseObject(JSON.toJSONString(flinkJob));
            if (StringUtils.isNotEmpty(flinkJob.getYarnApplicationId())) {
                JSONObject info = infoCache.get(flinkJob.getYarnApplicationId());
                if (info != null && !info.isEmpty()) {
                    atom.putAll(info);
                    flinkJob.setStatus(info.getString("status"));
                } else {
                    // YARN no longer knows this application — treat it as stopped.
                    atom.put("status", "STOPPED");
                    flinkJob.setStatus("STOPPED");
                }
            }
            result.add(atom);
        });
        this.flinkJobRepository.saveAll(flinkJobs);
        return result;
    }

    /**
     * Queries YARN for the current state (and, when available, tracking URL) of
     * each given application.
     *
     * <p>Applications that cannot be looked up (e.g. purged from the RM's history)
     * are logged and omitted from the result, so callers can detect them by the
     * missing key.
     *
     * @param yarnApplicationIds YARN application ids, e.g. {@code application_...}
     * @return map from application id to {@code {"status": ..., "trackingUrl": ...}}
     * @throws Exception if the overall YARN query fails
     */
    public Map<String, JSONObject> getFlinkJobInfoOnYarn(List<String> yarnApplicationIds) throws Exception {
        YarnClient yarnClient = YarnClient.createYarnClient();
        yarnClient.init(new YarnConfiguration());
        yarnClient.start();
        Map<String, JSONObject> appState = new HashMap<>();
        try {
            for (String yarnApplicationId : yarnApplicationIds) {
                try {
                    ApplicationReport report = yarnClient.getApplicationReport(
                            ConverterUtils.toApplicationId(yarnApplicationId));
                    YarnApplicationState yarnState = report.getYarnApplicationState();
                    JSONObject jobInfoOnYarn = new JSONObject();
                    jobInfoOnYarn.put("status", transYarnAppState(yarnState.toString()));
                    if (TRACKABLE_STATES.contains(yarnState)) {
                        jobInfoOnYarn.put("trackingUrl", report.getTrackingUrl());
                    }
                    appState.put(yarnApplicationId, jobInfoOnYarn);
                } catch (Exception e) {
                    // Best-effort per application: skip unknown/expired ids but
                    // leave a trace instead of swallowing the failure silently.
                    log.warn("@FlinkJobInfo: failed to fetch YARN report for {}",
                            yarnApplicationId, e);
                }
            }
        } catch (Exception e) {
            log.error("@FlinkJobInfo: failed to query YARN", e);
            throw new Exception("Failed to query YARN for Flink job status", e);
        } finally {
            yarnClient.stop();
        }
        return appState;
    }

    /**
     * Maps a {@link YarnApplicationState} name onto the service's own status
     * vocabulary (DEPLOYING / STOPPED / RUNNING / FAILED).
     *
     * @param state YARN state name, e.g. {@code "ACCEPTED"}
     * @return the local status, or {@code null} for unrecognized states
     */
    private String transYarnAppState(String state) {
        if (DEPLOYING_STATES.contains(state)) {
            return "DEPLOYING";
        }
        if (STOPPED_STATES.contains(state)) {
            return "STOPPED";
        }
        if (StringUtils.equals(state, "RUNNING")) {
            return "RUNNING";
        }
        if (StringUtils.equals(state, "FAILED")) {
            return "FAILED";
        }
        return null;
    }

    /** Deploys each job in turn; see {@link #deployFlinkJobOnYarn(FlinkJobEntity)}. */
    public void deployFlinkJobsOnYarn(List<FlinkJobEntity> flinkJobs) {
        for (FlinkJobEntity flinkJob : flinkJobs)
            deployFlinkJobOnYarn(flinkJob);
    }

    /**
     * Asynchronously deploys one job to YARN.
     *
     * <p>The job is marked {@code DEPLOYING} immediately; a callback later flips
     * it to {@code RUNNING} (recording the returned application id) or
     * {@code FAILED}, persisting each transition.
     *
     * @param flinkJob job to deploy; mutated and saved as the deployment progresses
     */
    public void deployFlinkJobOnYarn(FlinkJobEntity flinkJob) {
        flinkJob.setStatus("DEPLOYING");
        this.flinkJobRepository.save(flinkJob);
        log.info("@DeployFlinkJob: start to deploy job {}, jobId is {}",
                flinkJob.getFlinkJobName(), flinkJob.getFlinkJobId());

        ListenableFuture<String> future = this.listeningExecutorService.submit(() ->
                flinkJobDeploy.deployFlinkJob(flinkJob.getFlinkJobId(),
                        flinkJob.getFlinkJobName(), flinkJob.getSlotNumber(),
                        flinkJob.getMainFunction(), flinkJob.getJarFileName(),
                        flinkJob.getSavePointPath(), flinkJob.getParallelism(),
                        flinkJob.getJobManagerProcessSize(), flinkJob.getTaskManagerProcessSize(),
                        flinkJob.getArgsStr().split(" ")
                ));

        Futures.addCallback(future, new FutureCallback<String>() {
            @Override
            public void onSuccess(String result) {
                flinkJob.setYarnApplicationId(result);
                flinkJob.setStatus("RUNNING");
                flinkJobRepository.save(flinkJob);
                log.info("@DeployFlinkJob: deploy job {} successfully, applicationId: {}",
                        flinkJob.getFlinkJobName(), result);
            }

            @Override
            public void onFailure(Throwable t) {
                flinkJob.setStatus("FAILED");
                flinkJobRepository.save(flinkJob);
                // Pass the throwable as the last argument so the stack trace is logged.
                log.error("@DeployFlinkJob: deploy job {} failed",
                        flinkJob.getFlinkJobName(), t);
            }
        }, this.listeningExecutorService);
    }

    /** Stops each job in turn; see {@link #stopFlinkJobOnYarn(FlinkJobEntity)}. */
    public void stopFlinkJobsOnYarn(List<FlinkJobEntity> flinkJobs) {
        for (FlinkJobEntity flinkJob : flinkJobs)
            stopFlinkJobOnYarn(flinkJob);
    }

    /**
     * Asynchronously stops one job on YARN.
     *
     * <p>The job is marked {@code STOPPING} immediately; a callback later flips
     * it to {@code STOPPED} or {@code FAILED}, persisting each transition.
     *
     * @param flinkJob job to stop; mutated and saved as the stop progresses
     */
    public void stopFlinkJobOnYarn(FlinkJobEntity flinkJob) {
        flinkJob.setStatus("STOPPING");
        this.flinkJobRepository.save(flinkJob);
        log.info("@StopFlinkJob: start to stop job {}, jobId is {}",
                flinkJob.getFlinkJobName(), flinkJob.getFlinkJobId());

        ListenableFuture<String> future = this.listeningExecutorService.submit(() ->
                flinkJobStop.stopFlinkJob(flinkJob.getYarnApplicationId(), flinkJob.getFlinkJobId()));

        Futures.addCallback(future, new FutureCallback<String>() {
            @Override
            public void onSuccess(String result) {
                flinkJob.setStatus("STOPPED");
                flinkJobRepository.save(flinkJob);
                log.info("@StopFlinkJob: stop job {} successfully", flinkJob.getFlinkJobName());
            }

            @Override
            public void onFailure(Throwable t) {
                flinkJob.setStatus("FAILED");
                flinkJobRepository.save(flinkJob);
                // Pass the throwable as the last argument so the stack trace is logged.
                log.error("@StopFlinkJob: stop job {} failed",
                        flinkJob.getFlinkJobName(), t);
            }
        }, listeningExecutorService);
    }
}
