package com.talkingdata.hadoop.guard.controller;

import com.talkingdata.hadoop.guard.bean.*;
import com.talkingdata.hadoop.guard.mapper.ClusterAliasMapper;
import com.talkingdata.hadoop.guard.mapper.RmMapper;
import com.talkingdata.hadoop.guard.mapper.SchedulerMapper;
import com.talkingdata.hadoop.guard.mapper.YarnClusterMapper;
import com.talkingdata.hadoop.guard.service.HadoopHandlerSerivce;
import com.talkingdata.hadoop.guard.util.LoadConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.servlet.ModelAndView;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

@Controller
public class ClusterYarnStateController {

    /** JDK logger; replaces the previous printStackTrace/System.out error reporting. */
    private static final Logger LOG =
            Logger.getLogger(ClusterYarnStateController.class.getName());

    @Autowired
    private YarnClusterMapper yarnClusterMapper;

    @Autowired
    private LoadConfig loadConfig;

    @Autowired
    private RmMapper rmMapper;

    @Autowired
    private SchedulerMapper sm;

    @Autowired
    private ClusterAliasMapper cam;

    @Autowired
    private HadoopHandlerSerivce hadoopHandlerSerivce;

    /**
     * Renders the YARN cluster-state overview page.
     *
     * <p>For every cluster in the XML configuration it resolves the YARN name-service
     * identifier, then queries cluster state, active-RM state and cluster alias from
     * the database. For 3.1.0 clusters the scheduler state is queried as well.
     *
     * <p>NOTE: when a DB lookup fails, {@code null} entries are still appended to
     * {@code yarnStateList}/{@code rmList} so the four model lists stay index-aligned
     * per configured cluster — presumably the template depends on that alignment
     * (TODO confirm against the view). A failure for one cluster never aborts the
     * whole page; it is logged and the loop continues.
     *
     * @param model Spring MVC model receiving the four list attributes
     * @return logical view name {@code "cluster_yarn_state"}
     */
    @GetMapping({"/cluster_yarn_state.html"})
    public String clusterYarnState(Model model) {
        ArrayList<XmlConfig> configure = loadConfig.readxml();
        List<YarnClusterState> yarnStateList = new ArrayList<>();
        List<RmState> rmList = new ArrayList<>();
        List<SchedulerState> schedulerList = new ArrayList<>();
        List<YarnPage> yarnPagesList = new ArrayList<>();

        for (XmlConfig xc : configure) {
            YarnClusterState yarnClusterState = null;
            RmState rmState = null;
            ClusterAlias alias = null;
            try {
                String yarnNameServices = resolveYarnNameServices(xc);
                try {
                    yarnClusterState =
                            yarnClusterMapper.selectYarnClusterStateByid(yarnNameServices);
                    rmState = rmMapper.selectActiveRMStatebyClusterId(yarnNameServices);
                    alias = cam.selectClusterAliasByClusteryarnId(yarnNameServices);
                } catch (Exception e) {
                    // DB lookup failure: keep going with nulls so list alignment is preserved.
                    LOG.log(Level.WARNING,
                            xc.getServiceId() + " yarnClusterState or rmStates conn db failed", e);
                }
                // Scheduler state is only tracked for 3.1.0 clusters.
                if ("3.1.0".equals(xc.getVersion())) {
                    schedulerList.add(sm.selectSchedulerStateByClusterId(yarnNameServices));
                }
                yarnStateList.add(yarnClusterState);
                rmList.add(rmState);
                yarnPagesList.add(new YarnPage(rmState, alias));
            } catch (Exception e) {
                // Resolution/scheduler failure for this cluster: log and move on to the next.
                LOG.log(Level.WARNING,
                        "failed to collect YARN state for " + xc.getServiceId(), e);
            }
        }

        model.addAttribute("yarnPagesList", yarnPagesList);
        model.addAttribute("yarnStateList", yarnStateList);
        model.addAttribute("rmList", rmList);
        model.addAttribute("schedulerList", schedulerList);
        return "cluster_yarn_state";
    }

    /**
     * Maps a configured HDFS service id to its YARN name-service id.
     *
     * <p>Two clusters have hard-wired mappings (legacy special cases kept from the
     * original code); all others are resolved by initializing the Hadoop handler
     * for the config and reading the name services it discovers.
     *
     * @param xc cluster configuration entry from the XML config
     * @return the YARN name-service identifier for {@code xc}
     * @throws Exception if handler initialization or resolution fails
     */
    private String resolveYarnNameServices(XmlConfig xc) throws Exception {
        if ("bjxgapp".equals(xc.getServiceId())) {
            return "bjxgappyarn01";
        }
        if ("bjdcnameservices01".equals(xc.getServiceId())) {
            return "bjdchbase01yarn01";
        }
        hadoopHandlerSerivce.yarnInit(xc);
        return hadoopHandlerSerivce.getYarnNameServices();
    }
}
