package cn.getech.data.development.controller.real;

import cn.getech.data.development.config.properties.BdpJobConfig;
import cn.getech.data.development.config.properties.DataDevelopmentConfig;
import cn.getech.data.development.entity.flink.Node;
import cn.getech.data.development.model.vo.RealTimeTaskMenuSearchVO;
import cn.getech.data.development.service.FLinkService;
import cn.getech.data.development.service.RealTimeTaskOpsService;
import cn.getech.data.development.service.RealTimeTaskService;
import cn.getech.data.development.utils.HdfsUtil;
import cn.getech.data.development.utils.hadoop.HadoopLogUtils;
import cn.getech.data.development.utils.shell.ExecuteShellUtil;
import cn.getech.data.intelligence.common.utils.DateUtils;
import cn.getech.data.intelligence.common.utils.R;
import cn.getech.system.center.utils.ShiroUtils;
import cn.hutool.http.HttpUtil;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSchException;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

import java.nio.charset.StandardCharsets;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

@Slf4j
@Api(tags = "实时任务运维")
@RestController
@RequestMapping("/bdp/realTime/task/ops")
public class RealTimeTaskOpsController {

    /** Connect/read timeout (ms) for every Flink REST call made from this controller. */
    private static final int HTTP_TIMEOUT_MS = 5000;
    /** Flink reports -1 in "end-time" while a vertex/taskmanager is still running. */
    private static final long NOT_FINISHED = -1L;
    /** Hard cap on the log payload returned to the UI, in characters. */
    private static final int MAX_LOG_LENGTH = 200000;

    @Autowired
    private RealTimeTaskOpsService realTimeTaskOpsService;
    @Autowired
    private DataDevelopmentConfig developmentConfig;
    @Autowired
    private BdpJobConfig bdpJobConfig;
    @Autowired
    private FLinkService fLinkService;

    @ApiOperation("任务列表")
    @PostMapping("/page")
    public R page(@RequestBody RealTimeTaskMenuSearchVO vo) {
        return realTimeTaskOpsService.opsPage(vo, ShiroUtils.getUserId());
    }

    /**
     * Resolves the Flink job id for a YARN application by querying
     * {flinkImpl}/{appId}/jobs/overview and taking the first entry of "jobs".
     *
     * @param appId YARN application id, e.g. application_1600679071671_15393
     * @return R.okWithData(jobId) on success, R.error(...) when the app id is
     *         blank, the REST call fails, or no job is listed
     */
    private R getJobId(String appId) {
        if (StringUtils.isEmpty(appId)) {
            return R.error("应用id不能为空！");
        }
        String jobId = null;
        try {
            // e.g. application_1600679071671_15393/jobs/overview
            String url = developmentConfig.getFLinkImpl() + appId + "/jobs/overview";
            String data = HttpUtil.get(url, HTTP_TIMEOUT_MS);
            JSONObject overview = JSONObject.parseObject(data);
            JSONArray jobs = overview.getJSONArray("jobs");
            if (jobs != null && !jobs.isEmpty()) {
                jobId = jobs.getJSONObject(0).getString("jid");
            }
        } catch (Exception e) {
            // Keep the original user-facing message, but do not lose the cause.
            log.warn("query flink jobs overview failed, appId={}", appId, e);
            return R.error("Hadoop不存在【" + appId + "】任务，请稍后尝试");
        }
        if (jobId == null) {
            return R.error("任务不存在！");
        }
        return R.okWithData(jobId);
    }

    /**
     * Converts the epoch-millisecond fields "start-time", "end-time" and
     * "duration" of a Flink REST object to display strings, in place.
     * An "end-time" of -1 (still running) is rendered as "--".
     */
    private void formatTimeFields(JSONObject obj) {
        Long startTime = obj.getLong("start-time");
        if (startTime != null) {
            obj.put("start-time", DateUtils.format(new Date(startTime), DateUtils.DATE_TIME_PATTERN));
        }
        Long endTime = obj.getLong("end-time");
        if (endTime != null) {
            String s = "--";
            if (endTime != NOT_FINISHED) {
                s = DateUtils.format(new Date(endTime), DateUtils.DATE_TIME_PATTERN);
            }
            obj.put("end-time", s);
        }
        Long duration = obj.getLong("duration");
        if (duration != null) {
            // "duration" arrives in milliseconds; secToTime expects seconds.
            obj.put("duration", DateUtils.secToTime(duration / 1000));
        }
    }

    /** Replaces an epoch-millis field with its "HH:mm:ss" representation, if present. */
    private void formatClockField(JSONObject obj, String key) {
        Long time = obj.getLong(key);
        if (time != null) {
            obj.put(key, DateUtils.format(new Date(time), "HH:mm:ss"));
        }
    }

    /**
     * Job overview: fetches the job detail from the Flink REST API, formats
     * the vertex timestamps for display and lays out the execution-plan nodes
     * on an (x, y) grid for the front-end graph component.
     */
    @ApiOperation("任务总览")
    @GetMapping("/over/view")  //  /data-development/bdp/realTime/task/ops/over/view
    public R overView(String appId) {
        R r = getJobId(appId);
        if (!r.isOk()) {
            return r;
        }
        String jobId = r.get("data").toString();
        String url = developmentConfig.getFLinkImpl() + appId + "/jobs/" + jobId;
        String data = HttpUtil.get(url, HTTP_TIMEOUT_MS);
        JSONObject job = JSONObject.parseObject(data);
        JSONArray vertices = job.getJSONArray("vertices");
        JSONObject plan = job.getJSONObject("plan");
        if (vertices == null || plan == null) {
            log.error("flink错误:{}", data);
            return R.error("任务不存在！");
        }
        for (Object o : vertices) {
            formatTimeFields((JSONObject) o);
        }
        JSONArray nodes = plan.getJSONArray("nodes");
        if (nodes != null) {
            List<Node> list = JSONArray.parseArray(nodes.toJSONString(), Node.class);
            // Children keyed by the id of their first input, i.e. by parent node id.
            Map<String, List<Node>> childrenByParent = list.stream()
                    .filter(n -> CollectionUtils.isNotEmpty(n.getInputs()))
                    .collect(Collectors.groupingBy(n -> n.getInputs().get(0).getId()));
            // Root nodes (no inputs) sit in row 1; x advances per list position.
            int x = 1;
            for (Node node : list) {
                if (CollectionUtils.isEmpty(node.getInputs())) {
                    node.setX(x);
                    node.setY(1);
                    initXY(node, childrenByParent.get(node.getId()), childrenByParent);
                }
                x++;
            }
            plan.put("nodes", JSONArray.parseArray(JSONObject.toJSONString(list)));
        }
        return R.okWithData(job);
    }

    /**
     * Recursively assigns grid coordinates: each child sits one column right
     * of its parent (x + 1), siblings stacked top-down (y = 1, 2, ...).
     */
    private void initXY(Node parent, List<Node> child, Map<String, List<Node>> groupMap) {
        if (CollectionUtils.isNotEmpty(child)) {
            int y = 1;
            for (Node node : child) {
                node.setX(parent.getX() + 1);
                node.setY(y++);
                List<Node> nodes = groupMap.get(node.getId());
                initXY(node, nodes, groupMap);
            }
        }
    }

    /**
     * Collects the taskmanager entries of every vertex of the job and formats
     * their timestamps for display.
     */
    @ApiOperation("任务管理")
    @GetMapping("/taskManagers")  // /data-development/bdp/realTime/task/ops/taskManagers
    public R taskManagers(String appId) {
        R r = getJobId(appId);
        if (!r.isOk()) {
            return r;
        }
        String jobId = r.get("data").toString();
        String url = developmentConfig.getFLinkImpl() + appId + "/jobs/" + jobId;
        String data = HttpUtil.get(url, HTTP_TIMEOUT_MS);
        JSONObject job = JSONObject.parseObject(data);
        JSONArray vertices = job.getJSONArray("vertices");
        if (vertices == null) {
            log.error("flink错误:{}", data);
            return R.error("任务不存在！");
        }
        JSONArray allData = new JSONArray();
        for (Object o : vertices) {
            String id = ((JSONObject) o).getString("id");
            // /jobs/{jobId}/vertices/{vertexId}/taskmanagers
            String url2 = developmentConfig.getFLinkImpl() + appId + "/jobs/" + jobId + "/vertices/" + id + "/taskmanagers";
            String res = HttpUtil.get(url2, HTTP_TIMEOUT_MS);
            JSONObject task = JSONObject.parseObject(res);
            if (task.getJSONArray("taskmanagers") != null) {
                allData.addAll(task.getJSONArray("taskmanagers"));
            }
        }
        for (Object o : allData) {
            formatTimeFields((JSONObject) o);
        }
        return R.okWithData(allData);
    }

    /**
     * Latest checkpoint statistics; the two timestamps of the most recent
     * completed checkpoint are rendered as "HH:mm:ss".
     */
    @ApiOperation("检查点")
    @GetMapping("/checkpoints")  //  /data-development/bdp/realTime/task/ops/checkpoints
    public R checkpoints(String appId) {
        R r = getJobId(appId);
        if (!r.isOk()) {
            return r;
        }
        String jobId = r.get("data").toString();
        // /jobs/{jobId}/checkpoints
        String url = developmentConfig.getFLinkImpl() + appId + "/jobs/" + jobId + "/checkpoints";
        String data = HttpUtil.get(url, HTTP_TIMEOUT_MS);
        JSONObject check = JSONObject.parseObject(data);
        if (!check.containsKey("counts")) {
            log.error("flink错误:{}", data);
            return R.error("任务不存在！");
        }
        JSONObject latest = check.getJSONObject("latest");
        if (latest != null) {
            JSONObject completed = latest.getJSONObject("completed");
            if (completed != null) {
                // Both fields are epoch millis, e.g. trigger_timestamp: 1600334632871
                formatClockField(completed, "trigger_timestamp");
                formatClockField(completed, "latest_ack_timestamp");
            }
        }
        return R.okWithData(check);
    }

    /**
     * Tries each configured HDFS namenode in turn until application logs are
     * found, returning them under "root-exception" (empty object if none).
     * NOTE(review): renamed from geiHdfsLog (typo); it has no callers in this file.
     */
    private JSONObject getHdfsLog(String appId) {
        JSONObject dataRes = new JSONObject();
        for (String defaultFS : bdpJobConfig.getNamenodestr().split(",")) {
            HadoopLogUtils.setDefaultFS(defaultFS);
            try {
                String s = HadoopLogUtils.printLog(appId, "hdfs");
                if (StringUtils.isNotEmpty(s)) {
                    dataRes.put("root-exception", s);
                    break;
                }
            } catch (Exception e) {
                log.warn("获取失败:{} appId:{}", defaultFS, appId, e);
            }
        }
        return dataRes;
    }

    /**
     * Reads the run log of a locally launched (debug) task over SSH. Only
     * paths under /usr/realtime/ are served; anything else yields an empty
     * "root-exception". Best-effort: SSH failures return an empty payload.
     */
    @ApiOperation("运行日志")
    @GetMapping(value = "/exceptions",
            produces = "application/json;charset=utf-8")  ///bdp/realTime/task/ops/exceptions
    public R exceptions(String appId) {
        // Was named "log", which shadowed the @Slf4j logger and made the
        // JSchException below impossible to log.
        JSONObject result = new JSONObject();
        result.put("root-exception", "");
        if (StringUtils.isNotEmpty(appId) &&
                appId.contains("/usr/realtime/")) {
            ExecuteShellUtil resShell = ExecuteShellUtil.getInstance();
            // TODO(review): ExecuteShellUtil is never closed here — confirm whether
            // it holds an open SSH session that should be released in a finally block.
            try {
                resShell.init(developmentConfig.getFLinkHost(), developmentConfig.getFLinkPort(),
                        developmentConfig.getFLinkUserName(), developmentConfig.getFLinkPwd());
                String res = resShell.getFile(appId);
                // (removed a no-op UTF-8 round trip: new String(res.getBytes(UTF_8), UTF_8))
                if (res.contains("调试运行")) {
                    String debugData = fLinkService.getDebugData(appId);
                    if (StringUtils.isNotEmpty(debugData)) {
                        res = res + "\n" + debugData;
                    }
                }
                if (res.length() > MAX_LOG_LENGTH) {
                    res = res.substring(0, MAX_LOG_LENGTH);
                }
                result.put("root-exception", res);
                return R.okWithData(result);
            } catch (JSchException e) {
                // Preserve the original best-effort contract (empty payload),
                // but record the cause instead of swallowing it silently.
                log.warn("read realtime log over ssh failed, appId={}", appId, e);
                return R.okWithData("");
            }
        }
        return R.okWithData(result);
    }
}
