package com.ideal.hadoopadmin.crontab.kerberos;

import com.ideal.hadoopadmin.crontab.db.ConnectionManager;
import com.ideal.hadoopadmin.crontab.property.Properties;
import com.ideal.hadoopadmin.crontab.tool.Tools;
import com.ideal.tools.ssh.common.CommonProperties;
import com.ideal.tools.ssh.common.CommonTools;
import com.ideal.tools.ssh.common.OperationMarket;
import com.ideal.tools.ssh.context.ClusterContext;
import com.ideal.tools.ssh.entity.LinuxMachine;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.sql.Connection;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;

public class KerberosAPI {
    public KerberosAPI() {
        // Initialize the MySQL connection settings once so later
        // ConnectionManager.getConnection() calls can succeed.
        Properties.initMySQL();
    }
    private static Logger logger = LoggerFactory.getLogger(KerberosAPI.class);

    /**
     * Refreshes Kerberos tickets in three steps:
     * 1. generate the refresh shell script;
     * 2. run the script, which produces a log file;
     * 3. parse the log file and insert the results into cluster_user_kbrauth.
     * @param context cluster execution context (machines, properties)
     */
    public void FlushKerberos(ClusterContext context) {
        // Shared buffers handed down to the helper methods below.
        StringBuffer sql = new StringBuffer();
        List<String> sqlList = new ArrayList<String>();
        String stamp = Tools.getCurrentDate();
        // Directory on the WebAPP node where generated scripts live.
        String scriptDir = context.getCommonProperties().getProperty(CommonProperties.WEBAPP_SHELL_PATH, "");
        // Name of the refresh script generated for this run.
        String scriptName = context.getCommonProperties().getProperty(Properties.Kerberos_EXE_FileName, "") + stamp + ".sh";
        String remoteInitDir = context.getCommonProperties().getProperty(CommonProperties.WEBAPP_INIT_PATH, "") + "/";
        // Log file the script appends its output to.
        String logName = context.getCommonProperties().getProperty(Properties.Kerberos_INFO_FILENAME, "") + stamp + ".txt";
        String logPath = remoteInitDir + logName;
        // Machine lookup built from cluster_machine: id -> "id ip password user".
        HashMap<String, String> machineById = getClusterMachMap(sql);
        if (machineById.isEmpty()) return;
        // Generate the script, execute it, and produce the log file.
        logPath = doBusiness(context, sql, scriptDir, scriptName, remoteInitDir, logName, logPath, machineById);
        // Parse the log file and rebuild cluster_user_kbrauth from it.
        ReadLogAndUpdateTable(sql, sqlList, logPath, machineById);
        createCPShell(context);
    }

    /**
     * Copies each cached Kerberos ticket recorded in cluster_user_kbrauth into
     * the shared ticket-cache directory and makes the copy world-readable.
     * Only runs in the production environment (DemoEnvironmenttal == 0).
     * NOTE(review): machineID=7 is hard-coded -- presumably the WebAPP node;
     * confirm against the cluster_machine table.
     */
    private void createCPShell(ClusterContext context){
        if(Tools.DemoEnvironmenttal == 0){
            StringBuffer sql = new StringBuffer();
            Tools.getCreateSqlParam(sql, "select principal,ticketPath from cluster_user_kbrauth where machineID=7;");
            Connection conMachine = ConnectionManager.getConnection();
            List<Map<String, Object>> resKbrs = ConnectionManager.queryDB(conMachine, sql.toString());
            if (null == resKbrs || resKbrs.isEmpty()){
                logger.info("###KerberosAPI: Query cluster_user_kbrauth table is null!");
                return;
            }
            String cacheDir = context.getCommonProperties().getProperty(CommonProperties.Ticket_Cache_Path, "");
            for (Map<String, Object> kbr : resKbrs) {
                // Destination: <ticket cache dir> + <principal without realm>.
                String target = cacheDir + kbr.get("PRINCIPAL").toString().split("@")[0];
                doCommand("sudo cp " + kbr.get("TICKETPATH").toString() + " " + target);
                // BUG FIX: chmod previously targeted only the bare principal name (a
                // relative path); it must act on the file that was just copied.
                doCommand("sudo chmod 644 " + target);
            }
        }
        // Dead commented-out implementation (Hoster/kdc_auth based) removed; see
        // readKDCAuthiForPath for the remaining stub of that approach.
    }

    /**
     * Executes a local shell command and logs its stdout (lines joined with
     * "&lt;br&gt;"). Errors are logged, never thrown.
     *
     * @param cmd command line, tokenized on whitespace by Runtime.exec
     */
    private void doCommand(String cmd) {
        StringBuilder output = new StringBuilder(1000);
        try {
            Process process = Runtime.getRuntime().exec(cmd);
            // BUG FIX: the original called waitFor() BEFORE reading stdout; if the
            // child writes more than the pipe buffer holds, both sides deadlock.
            // Drain the output first, then wait for the exit code.
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(process.getInputStream()))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    output.append(line).append("<br>");
                }
            }
            process.waitFor();
            logger.info(output.toString());
        } catch (InterruptedException e) {
            // Preserve the interrupt status for callers further up the stack.
            Thread.currentThread().interrupt();
            logger.error("###flush Kerberos command error:" + e);
        } catch (Exception e) {
            logger.error("###flush Kerberos command error:" + e);
        }
    }

    /**
     * Stub: the original implementation (querying kdc_auth for ticket cache
     * paths on the local WebAPP node) is commented out below.
     *
     * @return always null -- callers must null-check. TODO: restore or remove.
     */
    public List<Map<String,Object>> readKDCAuthiForPath(){
       /* Hoster exeHoster= CommonTools.getWebAppNode().get(0);
        String ip=exeHoster.getIp();
        //查询出基础信息  通过 ip 和 状态 过滤
        String sql= "select kdc_auth.ticket_cahce_path,hadoopuser.hp_user_name from kdc_auth left join hadoopuser on kdc_auth.hp_user_id=hadoopuser.hp_user_id" +
                " where kdc_auth.ker_status=0 and kdc_auth.mc_ip='"+ip+"'";
        Connection conn= ConnectionManager.getConnection();

        List<Map<String,Object>> rsList=ConnectionManager.queryDB(conn, sql);

        return rsList;*/
        return null;
    }

    //Generate the shell script, execute it, and produce the log file
    /**
     * Generates the refresh script, writes/uploads it, runs it on the WebAPP
     * node, and returns the path of the resulting log file (a local path in
     * test mode, the remote path otherwise).
     */
    private String doBusiness(ClusterContext context, StringBuffer sql, String shellpath, String kerberos_sh, String initPath, String fileName, String kerberosFileName, HashMap<String, String> clusterMachMap) {
        //Build the script that refreshes the Kerberos tickets.
        StringBuffer cmd = getFlushKerberosSehll(kerberosFileName, shellpath, sql, clusterMachMap);
        //In local-test mode the script is generated in the working directory.
        if (Tools.DemoChangeDataBase == 1) {
            initPath = System.getProperty("user.dir")+System.getProperty("file.separator");
        }
        String file_path = initPath + kerberos_sh;
        //Write the script to disk.
        writeShellFileToMachine(cmd, file_path,initPath);
        //In local-test mode, upload the script file to the server.
        updateFileOrNo(context, file_path);
        //Run the script -> produces the log file.
        execShell(context, "sh " + context.getCommonProperties().getProperty(CommonProperties.WEBAPP_INIT_PATH, "") + "/" + kerberos_sh);
        context.printResult();
        //In local-test mode, download the log file and read it locally.
        kerberosFileName = downloadFileOrNo(context, fileName, kerberosFileName);
        return kerberosFileName;
    }

    //Parse the generated log file and insert the results into cluster_user_kbrauth
    /**
     * Reads the refresh log file and rebuilds cluster_user_kbrauth from it.
     * No-op when the log file is missing.
     */
    private void ReadLogAndUpdateTable(StringBuffer sql, List<String> sqlList, String kerberosFileName, HashMap<String, String> clusterMachMap) {
        //ReaderLogFile returns true when the log file is MISSING -> abort.
        if (ReaderLogFile(kerberosFileName)) return;
        List<String> kerberosList = Tools.readFile2List(kerberosFileName);
        //In local-test mode the log was downloaded locally; remove it after reading.
        if (Tools.DemoChangeDataBase == 1) new File(kerberosFileName).delete();
        //Truncate cluster_user_kbrauth before re-inserting fresh rows.
        clearClusterUserKbrauth(sqlList, sql);
        //Build the INSERT statements from the parsed log lines.
        getInsterClusterUKbrauthSql(sqlList, sql, clusterMachMap, kerberosList);
        //Run the batch insert.
        Tools.exeSQLBatch(sqlList);
    }

    //In local-test mode, download the log file for local reading
    private String downloadFileOrNo(ClusterContext context, String fileName, String kerberosFileName) {
        // Production mode: the log stays where it is.
        if (Tools.DemoChangeDataBase != 1) {
            return kerberosFileName;
        }
        // Local-test mode: pull the log from the WebAPP node into the current
        // working directory and hand back the local path.
        String localDir = System.getProperty("user.dir") + System.getProperty("file.separator");
        List<LinuxMachine> candidates = CommonTools.getMachineListByType(
                context.getOriginalList(), context, LinuxMachine.MachineType.WebAPP);
        for (LinuxMachine machine : candidates) {
            if (machine.getMachineType() == LinuxMachine.MachineType.WebAPP) {
                CommonTools.downLoadFileToLocal(machine.getSshAuthor(), kerberosFileName, localDir);
            }
        }
        return localDir + fileName;
    }

    //In local-test mode, upload the generated script file to the server
    private void updateFileOrNo(ClusterContext context, String file_path) {
        // Only relevant in local-test mode: the script was written locally and
        // must be pushed up to the WebAPP node, then the local copy removed.
        if (Tools.DemoChangeDataBase != 1) {
            return;
        }
        String remoteDir = context.getCommonProperties().getProperty(CommonProperties.WEBAPP_INIT_PATH, "") + "/";
        List<LinuxMachine> candidates = CommonTools.getMachineListByType(
                context.getOriginalList(), context, LinuxMachine.MachineType.WebAPP);
        for (LinuxMachine machine : candidates) {
            if (machine.getMachineType() == LinuxMachine.MachineType.WebAPP) {
                CommonTools.upLoadLocalFile(machine.getSshAuthor(), file_path, remoteDir);
            }
        }
        new File(file_path).delete();
    }

    /**
     * Checks whether the generated Kerberos log file exists.
     *
     * @param kerberosFileName path of the log file
     * @return true when the file is MISSING (callers treat true as "abort"),
     *         false when it is present
     */
    private boolean ReaderLogFile(String kerberosFileName) {
        File file = new File(kerberosFileName);
        if (!file.exists()) {
            // BUG FIX: the original called file.delete() here, but the file does
            // not exist at this point -- the call was a confusing no-op, removed.
            logger.info("###KerberosAPI: Reader log error, please check path of log file!");
            return true;
        }
        return false;
    }

    //Build the INSERT statements for cluster_user_kbrauth from the parsed log
    /**
     * Parses the refresh log line by line and appends one INSERT statement per
     * START/END marker section to sqlList. Inside a section the klist-style
     * output is scanned for the ticket path, validity times and failure hints;
     * the values replace the @start@/@end@/@path@/@status@ placeholders that
     * getIfStrartKBRefresh put into the SQL template.
     */
    private void getInsterClusterUKbrauthSql(List<String> sqlList, StringBuffer sql, HashMap<String, String> clusterMachMap, List<String> kerberosList) {
        //Look up cluster_user (userName -> id) once for resolving user ids.
        Map<String, String> userInfo = getUserInfo();
        sqlList.clear(); sql.setLength(0);
        //started: currently inside a marker section; time: next line carries the validity times.
        boolean started = false, time = false;
        String path = "", start = "", end = "", status = "0";
        for (String info : kerberosList) {
            if (StringUtils.isEmpty(info)) continue;
            if (info.startsWith(CommonProperties.START_KERBEROS_REFRESH)) {
                started = getIfStrartKBRefresh(sql, clusterMachMap, userInfo, info);
            } else if (info.startsWith(CommonProperties.END_KERBEROS_REFRESH)) {
                started = getIfEndKBRefresh(sqlList, sql, path, start, end, status);
                //Reset per-section state for the next entry.
                sql.setLength(0); path = ""; start = ""; end = ""; status = "0"; time = false;
            }
            if (started) {
                //Fill the placeholder values for the current section.
                //status: 0 = success, 1 = failure.
                if (info.contains("Password incorrect")) {
                    status = "1";
                }
                //Ticket cache path, e.g. "Ticket cache: FILE:/tmp/krb5cc_xxx".
                if (info.contains("Ticket cache:")) {
                    String[] tmp = info.split(":");
                    if (tmp.length >= 3) path = tmp[2].trim();
                }
                //The line AFTER "Valid starting" holds the start/end timestamps.
                if (info.contains("Valid starting")) {
                    time = true;
                }
                if (time && !info.contains("Valid starting")) {
                    String[] tmp = info.split(" ");
                    if (tmp.length >= 5) {
                        start = Tools.parseStringToDate(tmp[0] + " " + tmp[1]);
                        end = Tools.parseStringToDate(tmp[3] + " " + tmp[4]);
                    } else {
                        status = "1"; //malformed time line -> mark as failed
                    }
                    time = false;
                }
            }
        }
    }

    //Handles an END_KERBEROS_REFRESH marker line
    /**
     * Finalizes the current SQL template: substitutes the placeholder values
     * and appends the finished statement to sqlList.
     *
     * @return always false -- the caller uses this to leave the "started" state
     */
    private boolean getIfEndKBRefresh(List<String> sqlList, StringBuffer sql, String path, String start, String end, String status) {
        // A missing start or end timestamp means the refresh did not complete.
        if (start.trim().length() == 0 || end.trim().length() == 0) {
            status = "1";
        }
        String resolvedStart = StringUtils.isBlank(start) ? "0" : start;
        String resolvedEnd = StringUtils.isBlank(end) ? "0" : end;
        sqlList.add(sql.toString()
                .replace("@start@", resolvedStart)
                .replace("@end@", resolvedEnd)
                .replace("@path@", path)
                .replace("@status@", status));
        return false;
    }

    //Handles a START_KERBEROS_REFRESH marker line
    /**
     * Parses a start marker line and seeds the SQL template with the user,
     * machine and principal; @start@/@end@/@path@/@status@ stay as placeholders
     * until getIfEndKBRefresh fills them in.
     * Marker format (written by getFinalShell):
     *   "<marker>:<ip>,<user>,<principal>:param_end"
     *
     * @return always true -- the caller enters the "started" state
     */
    private boolean getIfStrartKBRefresh(StringBuffer sql, HashMap<String, String> clusterMachMap, Map<String, String> userInfo, String info) {
        boolean started; String params; String user; started = true;
        params = info.split(":")[1];
        String ip = params.split(",")[0];
        //Resolve the machine id from its ip.
        String mc_ip = getMcIdByIp(clusterMachMap, ip);
        user = params.split(",")[1];
        String userid = userInfo.get(user);
        if (userid == null) {
            userid = user; //unknown user: fall back to the raw name
        }
        String ker_princ = params.split(",")[2];
        //NOTE(review): SQL built by string concatenation; values come from our own
        //generated log, but parameterized SQL would still be safer.
        sql.append(" insert into cluster_user_kbrauth (userId,machineId,startTime");
        sql.append(" ,endTime,principal,ticketPath,status,createTime)  values('");
        sql.append(userid + "','" + mc_ip + "','@start@','@end@','" + ker_princ + "");
        sql.append(" ','@path@',@status@," + Tools.getCurrentDateTime() + ")");
        return started;
    }

    //Look up the machine id by its ip
    /**
     * Finds the first machine id whose map value mentions the given ip.
     *
     * @return the matching key, or "" when ip is empty or nothing matches
     */
    private String getMcIdByIp(HashMap<String, String> clusterMachMap, String ip) {
        // An empty ip can never match, so skip the scan entirely.
        if (StringUtils.isEmpty(ip)) {
            return "";
        }
        for (Map.Entry<String, String> entry : clusterMachMap.entrySet()) {
            if (entry.getValue().contains(ip)) {
                return entry.getKey();
            }
        }
        return "";
    }

    //Truncate cluster_user_kbrauth
    /**
     * Empties cluster_user_kbrauth via a single TRUNCATE, reusing the caller's
     * buffers (both are reset first; sqlList is left holding the statement).
     */
    private void clearClusterUserKbrauth(List<String> sqlList, StringBuffer sql) {
        sqlList.clear();
        sql.setLength(0);
        sql.append("truncate table cluster_user_kbrauth");
        sqlList.add(sql.toString());
        Tools.exeSQLBatch(sqlList);
    }

    //Execute the script -> produces the log file
    /**
     * Runs the given shell command: locally when DemoEnvironmenttal == 0
     * (production), otherwise remotely on the WebAPP node via the context.
     */
    private void execShell(ClusterContext context, String cmd) {
        if(Tools.DemoEnvironmenttal == 0){
            doCommand(cmd);
        }else{
            List<LinuxMachine> machineList = context.getOriginalList();
            List<LinuxMachine> finalMachines = CommonTools.getMachineListByType(machineList, context,LinuxMachine.MachineType.WebAPP);
            //There is only one WebAPP node, so the first entry is enough.
            finalMachines.get(0).initOperation(OperationMarket.ExeOneShellCMD(cmd));
            //Point the context at the WebAPP machine list.
            context.setMachineList(finalMachines);
            //Execute the queued operation.
            context.doTheThing();
            //Print the result.
            context.printResult();
        }
    }

    //Build the shell script that refreshes the Kerberos tickets
    /**
     * Builds the Kerberos refresh shell script from the kbrconfig table.
     * Returns an empty buffer (and logs) when the inputs are unavailable.
     */
    private StringBuffer getFlushKerberosSehll(String kerberosFileName, String shellpath, StringBuffer sql, HashMap<String, String> clusterMachMap) {
        StringBuffer cmd = new StringBuffer();
        if (clusterMachMap == null){
            logger.info("###KerberosAPI: Query cluster_machine error!");
            return cmd;
        }
        //Load cluster_user_kbrconfig (user, password, machineIds) into resMap.
        List<Map<String, Object>> resMap = getConfigInfo(sql);
        if (null == resMap || resMap.isEmpty()){
            logger.info("###KerberosAPI: Query cluster_user_kbrconfig error!");
            return cmd;
        }
        //Expand rows whose machineIds column holds several comma-separated ids.
        List<Map<String, Object>> configMap = getMachineByIds(resMap);
        //Assemble the script body.
        cmd.append("#!/bin/bash" + "\n");
        getFinalShell(configMap, clusterMachMap, cmd, kerberosFileName, shellpath);
        return cmd;
    }

    //Write the script file on this machine
    /**
     * Writes the generated refresh script to disk.
     *
     * @param cmd       script content
     * @param file_path full path of the script file to create
     * @param initPath  directory the script is written into; must already exist
     * @return true when the file was written, false when initPath does not exist
     */
    private boolean writeShellFileToMachine(StringBuffer cmd, String file_path, String initPath) {
        File dir = new File(initPath);
        if (!dir.exists()) {
            // BUG FIX: the original deleted the (non-existent) directory here --
            // a pointless call that has been removed.
            logger.info("###KerberosAPI: Write sh error, please check path of sh file !");
            return false;
        }
        Tools.writeLinuxFile(file_path, new StringBuffer(cmd.toString()));
        return true;
    }

    /**
     * Appends, for every config entry, the echo/expect/echo command group that
     * refreshes one principal and logs START/END markers around its output.
     *
     * @param configMap        one entry per (user, machineId) pair
     * @param clusterMachMap   machine id -> "id ip password user"
     * @param cmd              script buffer being assembled
     * @param kerberosFileName log file the commands append to
     * @param shellpath        directory containing authClientPrinc.exp
     */
    private void getFinalShell(List<Map<String, Object>> configMap, HashMap<String, String> clusterMachMap, StringBuffer cmd, String kerberosFileName, String shellpath) {
        StringBuffer param = new StringBuffer();
        for (Map<String, Object> con : configMap) {
            String machineId = con.get("MACHINEID").toString();
            if (StringUtils.isEmpty(machineId)) continue;
            // IMPROVED: the original re-split clusterMachMap.get(id) four times per
            // iteration; split once and index into the parts.
            // Value layout (see getClusterMachMap): ID MACHINEIP LOGINPASSWORD LOGINUSERNAME
            String[] machineParts = clusterMachMap.get(machineId).split(" ");
            String machineIp = machineParts[1];
            String loginPassword = machineParts[2];
            String loginUser = machineParts[3];
            String kdcRealm = con.get("USERNAME") + Properties.instance().getPropertyByKey(CommonProperties.KDC_REALM, "");
            // expect-script arguments: ip user 'password' hadoopUser principal 'systemPw'
            param.append(machineIp);
            param.append(" " + loginUser + " '");
            param.append(loginPassword + "' ");
            param.append(con.get("USERNAME") + " " + kdcRealm + " '" + con.get("SYSTEMPW") + "'");
            cmd.append("echo -e \"\\n" + CommonProperties.START_KERBEROS_REFRESH + ":" + machineIp
                    + "," + con.get("USERNAME") + "," + kdcRealm + ":param_end\">>" + kerberosFileName + ";" +
                    "expect " + shellpath + "authClientPrinc.exp " + param + ">>" + kerberosFileName + ";" +
                    "echo -e \"\\n" + CommonProperties.END_KERBEROS_REFRESH + "\\n\">>" + kerberosFileName + "\n");
            param.setLength(0);
        }
    }

    //Expand config rows whose machineIds column holds several comma-separated ids
    /**
     * Flattens kbrconfig rows so each output entry carries exactly one
     * MACHINEID (rows may list several ids separated by commas).
     * Rows with an empty MACHINEIDS column are skipped.
     */
    private List<Map<String, Object>> getMachineByIds(List<Map<String, Object>> resMap) {
        List<Map<String, Object>> configMap = new ArrayList<Map<String, Object>>();
        for (Map<String, Object> machine : resMap) {
            String machineIds = machine.get("MACHINEIDS").toString();
            if (StringUtils.isEmpty(machineIds)) continue;
            // IMPROVED: the original duplicated the map-building code in separate
            // single-id and multi-id branches; one loop covers both (split of a
            // non-empty string always yields at least one element).
            for (String machineId : machineIds.split(",")) {
                HashMap<String, Object> temMachine = new HashMap<String, Object>();
                temMachine.put("USERNAME", machine.get("USERNAME"));
                temMachine.put("SYSTEMPW", machine.get("SYSTEMPW"));
                temMachine.put("MACHINEID", machineId);
                configMap.add(temMachine);
            }
        }
        return configMap;
    }

    //Load the kbrconfig rows from the database into resMap
    /**
     * Loads the active refresh configuration: userName, systemPW and machineIds
     * for every cluster_user_kbrconfig row with status=0.
     */
    private List<Map<String, Object>> getConfigInfo(StringBuffer sql) {
        sql.setLength(0);
        Tools.getCreateSqlParam(sql, "SELECT cluster_user.userName,cluster_user.systemPW"
                , ",cluster_user_kbrconfig.machineIds from cluster_user_kbrconfig left "
                , "join cluster_user on cluster_user_kbrconfig.userId=cluster_user.id"
                , " where cluster_user_kbrconfig.status=0");
        Connection conClusterUser = ConnectionManager.getConnection();
        return ConnectionManager.queryDB(conClusterUser, sql.toString());
    }

    //Build clusterMachMap: key -> machine id, value -> "id ip password user"
    /**
     * Loads cluster_machine into a map keyed by machine id; each value packs
     * "id ip password user" as a space-separated string.
     *
     * @return the map; empty (never null) when the table has no rows
     */
    private HashMap<String, String> getClusterMachMap(StringBuffer sql) {
        sql.setLength(0);
        Tools.getCreateSqlParam(sql, "SELECT id,machineIp,loginUserName,loginPassWord from cluster_machine;");
        Connection connection = ConnectionManager.getConnection();
        List<Map<String, Object>> rows = ConnectionManager.queryDB(connection, sql.toString());
        HashMap<String, String> machineMap = new HashMap<String, String>();
        if (rows == null || rows.isEmpty()) {
            logger.info("###KerberosAPI: Query cluster_machine table is null!");
            return machineMap;
        }
        for (Map<String, Object> row : rows) {
            String id = row.get("ID").toString();
            String packed = id
                    + " " + row.get("MACHINEIP").toString() + " "
                    + row.get("LOGINPASSWORD").toString() + " "
                    + row.get("LOGINUSERNAME").toString();
            machineMap.put(id, packed);
        }
        return machineMap;
    }

    //Build clusterMachMap: key -> machine ip, value -> "id ip password user"
    /**
     * Loads cluster_machine into a map keyed by machine ip; each value packs
     * "id ip password user" as a space-separated string.
     *
     * @return the map, or null when the table has no rows
     */
    private HashMap<String, String> getClusterMachMapByIp() {
        HashMap<String, String> clusterMachMap = new HashMap<String, String>();
        StringBuffer sql = new StringBuffer("SELECT id,machineIp,loginUserName,loginPassWord from cluster_machine;");
        Connection conMachine = ConnectionManager.getConnection();
        List<Map<String, Object>> resMachine = ConnectionManager.queryDB(conMachine, sql.toString());
        // BUG FIX: the original used "null == resMachine && resMachine.isEmpty()",
        // which NPEs when the result is null and lets an empty result fall
        // through; the intended short-circuit is OR.
        if (null == resMachine || resMachine.isEmpty()) return null;
        for (Map<String, Object> mId : resMachine) {
            clusterMachMap.put(mId.get("MACHINEIP").toString(), mId.get("ID").toString()
                    + " " + mId.get("MACHINEIP").toString() + " "
                    + mId.get("LOGINPASSWORD").toString() + " "
                    + mId.get("LOGINUSERNAME").toString());
        }
        return clusterMachMap;
    }

    //Query cluster_user
    /**
     * Loads cluster_user into a map of userName -> id.
     *
     * @return map from user name to user id; empty (never null) when the
     *         query returns nothing
     */
    public Map<String, String> getUserInfo() {
        Map<String, String> userMap = new HashMap<String, String>();
        StringBuffer sql = new StringBuffer("select id,userName from cluster_user");
        Connection conn = ConnectionManager.getConnection();
        List<Map<String, Object>> rsList = ConnectionManager.queryDB(conn, sql.toString());
        // IMPROVED: removed an unused sqlList local; guard against a null query
        // result instead of throwing NPE in the loop below.
        if (rsList == null) {
            return userMap;
        }
        for (Map<String, Object> user : rsList) {
            userMap.put(user.get("USERNAME").toString(), user.get("ID").toString());
        }
        return userMap;
    }

    //Read the KDC refresh records
    /**
     * Reads the failed KDC refresh records (status=1) joined with the owning
     * user's name.
     */
    public List<Map<String, Object>> readKDCAuthi() {
        String sql = "select cluster_user_kbrauth.machineId,cluster_user_kbrauth.endTime"
                + " ,cluster_user_kbrauth.principal,cluster_user_kbrauth.status,cluster_user.userName from "
                + " cluster_user_kbrauth left join cluster_user on cluster_user_kbrauth.userId=cluster_user.id"
                + " where cluster_user_kbrauth.status=1";
        Connection conn = ConnectionManager.getConnection();
        return ConnectionManager.queryDB(conn, sql);
    }

    //Insert cluster_user_kbrauth rows with status=1 into t_schedule (currently disabled)
    /*private List<String> insertSheduleSQL(List<String> sqlList) {
        sqlList.clear();
        List<Map<String, Object>> kdcAuth = readKDCAuthi();
        String phone = Properties.instance().getPropertyByKey("MESSAGE.MESSAGE_PHONE", "");
        for (Map<String, Object> kdc : kdcAuth) {
            String status = kdc.get("STATUS").toString();
            String endtime = kdc.get("ENDTIME").toString();
            String ip = kdc.get("MACHINEID").toString();
            String princle = kdc.get("PRINCIPAL").toString();
            String hadoopuser = kdc.get("USERNAME").toString();
            String param = "phone=" + phone + ";message=用户[" + hadoopuser + "]" +
                    "kerberos刷新失败[Time:" + Tools.getCurrentDate() + ",IP:" + ip + "Princle:" + princle + "]！";
            if (status.equals("1")) {
                String sql = "insert into t_schedule(type,run_time,parameters)" +
                        "values('MESSAGE_ALERT','" + Tools.getCurrentDate() + "','" + param + "')";
                sqlList.add(sql);
            } else if (getDateByFormate(null, endtime).before(new Date())) {
                String sql = "insert into t_schedule(type,run_time,parameters)" +
                        "values('MESSAGE_ALERT','" + Tools.getCurrentDate() + "','" + param + "')";
                sqlList.add(sql);
            }
        }
        return sqlList;
    }*/

    //Parse a date string into a Date, applying defaults when format/value are blank
    /**
     * Parses dateStr with the given SimpleDateFormat pattern; when the pattern
     * is blank a compact timestamp pattern is used, and when dateStr is blank
     * the current date from Tools.getCurrentDate() is parsed instead.
     *
     * @param formate SimpleDateFormat pattern, may be null/blank
     * @param dateStr value to parse, may be null/blank
     * @return the parsed Date, or null when parsing fails
     */
    public static Date getDateByFormate(String formate, String dateStr) {
        // BUG FIX: the original default "yyyyMMDDHHMMSS" mixed up the pattern
        // letters -- DD is day-of-year, the second MM re-parses the month where
        // minutes were meant, and SS is milliseconds. The correct compact
        // timestamp pattern is "yyyyMMddHHmmss".
        if (formate == null || formate.trim().length() == 0)
            formate = "yyyyMMddHHmmss";
        SimpleDateFormat sdf = new SimpleDateFormat(formate);
        Date date = null;
        try {
            date = sdf.parse(StringUtils.isEmpty(dateStr) ? Tools.getCurrentDate() : dateStr);
        } catch (ParseException e) {
            e.printStackTrace();
        }
        return date;
    }

    /**
     * Scheduler entry point: refreshes Kerberos using the demo cluster context.
     */
    public void call() {
        //Hand-built test ClusterContext (kept for reference, test use only)
//        Map<String, String> propertyMap = new HashMap<String, String>();
//        propertyMap.put(Properties.Kerberos_INFO_FILENAME, Properties.Kerberos_INFO_FILENAME);//正式环境(kerberos执行完脚本生成的日志路径)
//        propertyMap.put(CommonProperties.WEBAPP_INIT_PATH, Properties.instance().getPropertyByKey(CommonProperties.WEBAPP_INIT_PATH, ""));
//        propertyMap.put(CommonProperties.WEBAPP_SHELL_PATH, Properties.instance().getPropertyByKey(CommonProperties.WEBAPP_SHELL_PATH, "")); //生成脚本路径
//        propertyMap.put(Properties.Kerberos_EXE_FileName, Properties.Kerberos_EXE_FileName); //kerberos脚本名
//        CommonProperties commonProperties = new CommonProperties(propertyMap);
//        ClusterContext context = new ClusterContext(commonProperties);
//        //构造机器参数
//        List<LinuxMachine> machineList = new ArrayList<LinuxMachine>();
//        LinuxMachine nn1 = new LinuxMachine().initIP("10.5.24.151").initLoginName("I-Hadoop")
//                .initPassWord("ideal123").initMachineType(LinuxMachine.MachineType.WebAPP)
//                .initPubKey("AAAAC3NzaC1lZDI1NTE5AAAAIFceht+lsORHJXhdlnB6+zQJ3Z3vfme546mAuzqmbtIy");
//        machineList.add(nn1);
//        context.setOriginalList(machineList);
        //End of test scaffolding
        //Refresh Kerberos
        FlushKerberos(Tools.getDemoClusterContextDemo());
    }

    //Demo
    public static void main(String[] d) {
        // Demo driver: constructing the API initializes the MySQL settings,
        // then a full refresh runs against the demo cluster context.
        KerberosAPI api = new KerberosAPI();
        api.FlushKerberos(Tools.getDemoClusterContextDemo());
    }

}
