package cn.org.intelli.zjgflink.controller;

import cn.org.intelli.zjgflink.aop.AuthenticationTicket;
import cn.org.intelli.zjgflink.domain.BackBean;
import cn.org.intelli.zjgflink.entity.FlinkJobEntity;
import cn.org.intelli.zjgflink.repository.FlinkJobRepository;
import cn.org.intelli.zjgflink.service.FlinkJobService;
import cn.org.intelli.zjgflink.service.HdfsOpService;
import cn.org.intelli.zjgflink.util.Uid;
import com.alibaba.fastjson.JSONArray;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.data.repository.query.Param;
import org.springframework.web.bind.annotation.*;

import javax.persistence.criteria.Predicate;
import java.util.*;

/**
 * REST endpoints for managing Flink jobs: paged listing, creation, update,
 * deployment to / stopping on YARN, and deletion (including the job's HDFS
 * artifacts).
 *
 * <p>All endpoints return a {@link BackBean}; failures are reported with
 * {@code status=false} and the exception message rather than an HTTP error
 * status, matching the project's existing convention.
 */
@RestController
@RequestMapping({"/flink"})
public class FlinkJobController {
    private static final Logger log = LoggerFactory.getLogger(FlinkJobController.class);

    // Lifecycle states used to guard mutating operations. A job that is
    // RUNNING / DEPLOYING / STOPPING must not be edited, redeployed or deleted.
    private static final String STATUS_RUNNING = "RUNNING";
    private static final String STATUS_DEPLOYING = "DEPLOYING";
    private static final String STATUS_STOPPING = "STOPPING";
    private static final String STATUS_STOPPED = "STOPPED";
    private static final String STATUS_FAILED = "FAILED";
    private static final String STATUS_CREATED = "CREATED";

    @Autowired
    private FlinkJobRepository flinkJobRepository;

    @Autowired
    private FlinkJobService flinkJobService;

    @Autowired
    private HdfsOpService hdfsOpService;

    // HDFS base directory under which each job's artifacts live
    // (per-job subdirectory named by flinkJobId).
    @Value("${flink.hdfs.job}")
    private String flinkHdfsJob;

    /**
     * Pages through Flink jobs, newest first, optionally filtered by a fuzzy
     * match on the job name.
     *
     * @param page         1-based page index (defaults to 1)
     * @param pageSize     page size (defaults to 10)
     * @param flinkJobName optional substring filter on {@code flinkJobName}
     * @return the page content rendered by
     *         {@link FlinkJobService#transFlinkJobs}, with the total row count
     */
    @GetMapping({"/getFlinkJobList"})
    public BackBean<JSONArray> getFlinkJobList(
            @RequestParam(name = "page", defaultValue = "1") Integer page,
            @RequestParam(name = "pageSize", defaultValue = "10") Integer pageSize,
            // was @Param (Spring Data) which Spring MVC ignores; @RequestParam
            // with required=false makes the optional binding explicit
            @RequestParam(name = "flinkJobName", required = false) String flinkJobName) {
        try {
            Sort sort = Sort.by(Sort.Direction.DESC, "createTime");
            PageRequest pageRequest = PageRequest.of(page - 1, pageSize, sort);

            // Dynamic JPA Specification: only apply the name filter when given.
            Page<FlinkJobEntity> jobPage = this.flinkJobRepository.findAll((root, query, cb) -> {
                List<Predicate> predicates = new ArrayList<>();
                if (StringUtils.isNotBlank(flinkJobName)) {
                    predicates.add(cb.like(root.get("flinkJobName"), "%" + flinkJobName + "%"));
                }
                return cb.and(predicates.toArray(new Predicate[0]));
            }, pageRequest);

            JSONArray result = this.flinkJobService.transFlinkJobs(jobPage.getContent());

            return BackBean.<JSONArray>builder()
                    .status(true)
                    .data(result)
                    .totalNum(jobPage.getTotalElements())
                    .build();
        } catch (Exception e) {
            // pass the throwable so the stack trace reaches the log
            log.error("@GetFlinkJobList: error is {}", e.getMessage(), e);
            return BackBean.<JSONArray>builder()
                    .status(false)
                    .message(e.getMessage())
                    .build();
        }
    }

    /**
     * Creates a new Flink job record in state {@code CREATED}, assigning a
     * generated short id and the current user's pin.
     *
     * @param flinkJob job definition from the request body; id, owner, status
     *                 and create time are overwritten server-side
     */
    @PostMapping({"/createFlinkJob"})
    public BackBean<String> createFlinkJob(@RequestBody FlinkJobEntity flinkJob) {
        try {
            String flinkJobId = Uid.generateShortUuid();
            String userPin = AuthenticationTicket.getTicket().getPin();
            flinkJob.setFlinkJobId(flinkJobId);
            flinkJob.setUserPin(userPin);
            flinkJob.setStatus(STATUS_CREATED);
            flinkJob.setCreateTime(new Date());
            this.flinkJobRepository.save(flinkJob);
            return BackBean.<String>builder().status(true).data("success").build();
        } catch (Exception e) {
            log.error("@CreateFlinkJob: error is {}", e.getMessage(), e);
            return BackBean.<String>builder().status(false).message(e.getMessage()).build();
        }
    }

    /**
     * Partially updates an existing job: only non-empty fields of the request
     * body overwrite the stored entity. Rejected while the job is RUNNING or
     * DEPLOYING.
     *
     * @param flinkJobId id of the job to update
     * @param flinkJob   fields to patch; null/empty fields are left untouched
     */
    @PostMapping({"/updateFlinkJob/{flinkJobId}"})
    public BackBean<String> updateFlinkJob(@PathVariable("flinkJobId") String flinkJobId, @RequestBody FlinkJobEntity flinkJob) {
        try {
            // was findById(...).get(): now fails with a message naming the id
            FlinkJobEntity oriJob = this.flinkJobRepository.findById(flinkJobId)
                    .orElseThrow(() -> new NoSuchElementException("flink job not found: " + flinkJobId));
            String status = oriJob.getStatus();
            if (StringUtils.equals(status, STATUS_RUNNING) ||
                    StringUtils.equals(status, STATUS_DEPLOYING))
                // was `throw new Exception("")` — empty message gave the caller no clue
                throw new IllegalStateException("job " + flinkJobId + " cannot be updated while " + status);
            if (StringUtils.isNotEmpty(flinkJob.getFlinkJobName()))
                oriJob.setFlinkJobName(flinkJob.getFlinkJobName());
            if (flinkJob.getSlotNumber() != null)
                oriJob.setSlotNumber(flinkJob.getSlotNumber());
            if (flinkJob.getParallelism() != null)
                oriJob.setParallelism(flinkJob.getParallelism());
            if (StringUtils.isNotEmpty(flinkJob.getMainFunction()))
                oriJob.setMainFunction(flinkJob.getMainFunction());
            if (StringUtils.isNotEmpty(flinkJob.getJarFileName()))
                oriJob.setJarFileName(flinkJob.getJarFileName());
            if (StringUtils.isNotEmpty(flinkJob.getDescription()))
                oriJob.setDescription(flinkJob.getDescription());
            if (StringUtils.isNotEmpty(flinkJob.getArgsStr()))
                oriJob.setArgsStr(flinkJob.getArgsStr());
            this.flinkJobRepository.save(oriJob);
            return BackBean.<String>builder().status(true).data("success").build();
        } catch (Exception e) {
            log.error("@UpdateFlinkJob: error is {}", e.getMessage(), e);
            return BackBean.<String>builder().status(false).message(e.getMessage()).build();
        }
    }

    /**
     * Deploys the selected jobs on YARN. Jobs already RUNNING / DEPLOYING /
     * STOPPING are skipped; the skip count is reported in the message.
     *
     * @param flinkJobIds JSON array of job ids to deploy
     */
    @PostMapping({"/deployFlinkJobsOnYarn"})
    public BackBean<String> deployFlinkJobsOnYarn(@RequestBody JSONArray flinkJobIds) {
        try {
            List<String> flinkJobIdList = flinkJobIds.toJavaList(String.class);
            List<FlinkJobEntity> flinkJobs = this.flinkJobRepository.findAllByFlinkJobIdIn(flinkJobIdList);
            Map<String, FlinkJobEntity> skipped = new HashMap<>();
            List<FlinkJobEntity> toDeploy = new ArrayList<>();
            for (FlinkJobEntity flinkJob : flinkJobs) {
                String status = flinkJob.getStatus();
                if (StringUtils.equals(status, STATUS_RUNNING) ||
                        StringUtils.equals(status, STATUS_DEPLOYING) ||
                        StringUtils.equals(status, STATUS_STOPPING)) {
                    skipped.put(flinkJob.getFlinkJobId(), flinkJob);
                    continue;
                }
                toDeploy.add(flinkJob);
            }
            String message = null;
            if (!skipped.isEmpty())
                // was the broken literal `"+ cache.size() + "` (decompile residue);
                // reconstructed as the intended count report
                message = skipped.size() + " job(s) skipped: already RUNNING/DEPLOYING/STOPPING";
            if (!toDeploy.isEmpty())
                this.flinkJobService.deployFlinkJobsOnYarn(toDeploy);
            return BackBean.<String>builder().status(true).data("success").message(message).build();
        } catch (Exception e) {
            log.error("@DeployFlinkJobsOnYarn: error is {}", e.getMessage(), e);
            return BackBean.<String>builder().status(false).message(e.getMessage()).build();
        }
    }

    /**
     * Stops the selected jobs on YARN. Only jobs currently RUNNING are stopped;
     * the rest are skipped and counted in the message.
     *
     * @param flinkJobIds JSON array of job ids to stop
     */
    @PostMapping({"/stopFlinkJobsOnYarn"})
    public BackBean<String> stopFlinkJobsOnYarn(@RequestBody JSONArray flinkJobIds) {
        try {
            List<String> flinkJobIdList = flinkJobIds.toJavaList(String.class);
            List<FlinkJobEntity> flinkJobs = this.flinkJobRepository.findAllByFlinkJobIdIn(flinkJobIdList);
            List<FlinkJobEntity> flink2Stop = new ArrayList<>();
            Map<String, FlinkJobEntity> skipped = new HashMap<>();
            flinkJobs.forEach(flinkJob -> {
                String status = flinkJob.getStatus();
                if (StringUtils.equals(status, STATUS_RUNNING)) {
                    flink2Stop.add(flinkJob);
                } else {
                    skipped.put(flinkJob.getFlinkJobId(), flinkJob);
                }
            });
            String message = null;
            if (!skipped.isEmpty())
                message = skipped.size() + " job(s) skipped: not in RUNNING state";
            // BUGFIX: previously passed the unfiltered `flinkJobs`, so non-RUNNING
            // jobs were sent to the stop service despite the filtering above.
            if (!flink2Stop.isEmpty())
                this.flinkJobService.stopFlinkJobsOnYarn(flink2Stop);
            return BackBean.<String>builder().status(true).data("success").message(message).build();
        } catch (Exception e) {
            log.error("@StopFlinkJobsOnYarn: error is {}", e.getMessage(), e);
            return BackBean.<String>builder().status(false).message(e.getMessage()).build();
        }
    }

    /**
     * Deletes a job record and its HDFS artifact directory. Only allowed when
     * the job is STOPPED or FAILED.
     *
     * @param flinkJobId id of the job to delete
     */
    @PostMapping({"/deleteFlinkJob/{flinkJobId}"})
    public BackBean<String> deleteFlinkJob(@PathVariable("flinkJobId") String flinkJobId) {
        try {
            FlinkJobEntity flinkJob = this.flinkJobRepository.findById(flinkJobId)
                    .orElseThrow(() -> new NoSuchElementException("flink job not found: " + flinkJobId));
            String status = flinkJob.getStatus();
            if (!StringUtils.equals(status, STATUS_STOPPED) && !StringUtils.equals(status, STATUS_FAILED))
                throw new IllegalStateException("job " + flinkJobId + " cannot be deleted while " + status);
            // remove the job's artifacts from HDFS before dropping the record
            String path = this.flinkHdfsJob + "/" + flinkJobId;
            this.hdfsOpService.deleteFileOrDirectory(path);
            this.flinkJobRepository.deleteById(flinkJobId);
            return BackBean.<String>builder().status(true).data("success").build();
        } catch (Exception e) {
            log.error("@DeleteFlinkJob: error is {}", e.getMessage(), e);
            return BackBean.<String>builder().status(false).message(e.getMessage()).build();
        }
    }
}
