package com.ideal.tools.ssh.common;

import com.ideal.tools.ssh.context.ClusterContext;
import com.ideal.tools.ssh.entity.LinuxCommand;
import com.ideal.tools.ssh.entity.LinuxMachine;
import com.ideal.tools.ssh.entity.SSHAuthor;
import com.ideal.tools.ssh.executor.SCPDownLoadExecutor;
import com.ideal.tools.ssh.executor.SCPFileExecutor;
import com.ideal.tools.ssh.result.ExecutorResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * Created by CC on 2016/3/10.
 */
public class CommonTools {

    /**
     * Timestamp pattern, e.g. 20160310153045.
     * A {@link SimpleDateFormat} is created per call (see {@link #newTimestampFormat()})
     * because SimpleDateFormat is NOT thread-safe and this class's static methods
     * may be invoked concurrently.
     */
    private static final String TIMESTAMP_PATTERN = "yyyyMMddHHmmss";
    private static final Logger logger = LoggerFactory.getLogger(CommonTools.class);

    /** Creates a fresh (thread-confined) formatter for {@link #TIMESTAMP_PATTERN}. */
    private static SimpleDateFormat newTimestampFormat() {
        return new SimpleDateFormat(TIMESTAMP_PATTERN);
    }

    /**
     * Derives the user's group name from the user name.
     *
     * @param userName   the Linux/HDFS user name
     * @param properties source of the {@code HDFS_GROUP_SUFFIX} setting (default "_group")
     * @return userName + group suffix, e.g. "alice_group"
     */
    public static String getUserGroupByUser(String userName, CommonProperties properties) {
        String groupSuffix = properties.getProperty(CommonProperties.HDFS_GROUP_SUFFIX, "_group");
        return userName + groupSuffix;
    }

    /**
     * Builds the group name used for a shared (public) directory.
     *
     * @param username owner user name
     * @param tblName  table name the directory belongs to
     * @return username + "_" + tblName
     */
    public static String getHDFSPubDirGroup(String username, String tblName) {
        return username + "_" + tblName;
    }

    /**
     * Builds the user's parent (home) directory on HDFS.
     *
     * @param userName   the HDFS user name
     * @param properties source of the {@code HDFS_PATH_PREFIX} setting (default "/user/")
     * @return e.g. "/user/alice"
     */
    public static String getHDFSParentDir(String userName, CommonProperties properties) {
        String dirPrefix = properties.getProperty(CommonProperties.HDFS_PATH_PREFIX, "/user/");
        return dirPrefix + userName;
    }

    /**
     * Builds the user's public directory on HDFS.
     *
     * @param userName   the HDFS user name
     * @param properties source of the path prefix/suffix settings
     * @return e.g. "/user/alice/public"
     */
    public static String getHDFSPublicDir(String userName, CommonProperties properties) {
        String dirPrefix = properties.getProperty(CommonProperties.HDFS_PATH_PREFIX, "/user/");
        String publicSuffix = properties.getProperty(CommonProperties.HDFS_PUBLIC_PATH_SUFFIX, "/public");
        return dirPrefix + userName + publicSuffix;
    }

    /**
     * Builds the path of a shared table directory under the user's public directory.
     *
     * @param userName   the HDFS user name
     * @param tblName    table name
     * @param properties source of the path settings
     * @return e.g. "/user/alice/public/mytable"
     */
    public static String getHDFSPublicTblDir(String userName, String tblName, CommonProperties properties) {
        String hdfsPub = getHDFSPublicDir(userName, properties);
        return hdfsPub + "/" + tblName;
    }

    /**
     * Builds the user's private directory on HDFS.
     *
     * @param userName   the HDFS user name
     * @param properties source of the path prefix/suffix settings
     * @return e.g. "/user/alice/private"
     */
    public static String getHDFSPrivateDir(String userName, CommonProperties properties) {
        String dirPrefix = properties.getProperty(CommonProperties.HDFS_PATH_PREFIX, "/user/");
        String privateSuffix = properties.getProperty(CommonProperties.HDFS_PRIVATE_PATH_SUFFIX, "/private");
        return dirPrefix + userName + privateSuffix;
    }

    /**
     * Builds the user's trash directory on HDFS.
     *
     * @param userName   the HDFS user name
     * @param properties source of the path prefix/suffix settings
     * @return e.g. "/user/alice/.Trash"
     */
    public static String getHDFSTrashDir(String userName, CommonProperties properties) {
        String dirPrefix = properties.getProperty(CommonProperties.HDFS_PATH_PREFIX, "/user/");
        String trashSuffix = properties.getProperty(CommonProperties.HDFS_TRASH_PATH_SUFFIX, "/.Trash");
        return dirPrefix + userName + trashSuffix;
    }

    /**
     * Builds the user's Hive scratch directory on HDFS.
     *
     * @param userName   the HDFS user name
     * @param properties source of the {@code HDFS_HIVE_TEMP_DIR} setting (default "/tmp/hive-")
     * @return e.g. "/tmp/hive-alice"
     */
    public static String getHDFSHiveTempDir(String userName, CommonProperties properties) {
        String hivePrefix = properties.getProperty(CommonProperties.HDFS_HIVE_TEMP_DIR, "/tmp/hive-");
        return hivePrefix + userName;
    }

    /**
     * Builds the user's Hive client log directory on the local machine.
     *
     * @param userName   the user name
     * @param properties source of the {@code HIVE_CLIENT_LOG_DIR} setting (default "/tmp/")
     * @return e.g. "/tmp/alice"
     */
    public static String getClientHiveLogDir(String userName, CommonProperties properties) {
        String logPrefix = properties.getProperty(CommonProperties.HIVE_CLIENT_LOG_DIR, "/tmp/");
        return logPrefix + userName;
    }

    /**
     * Builds the Sentry/Hive principal path (HDFS URI) for a user.
     *
     * @param useName    the user name
     * @param properties source of the HDFS URI prefix settings
     * @return e.g. "hdfs://ns3/user/alice/"
     */
    public static String getHiveSentryPrincipalPath(String useName, CommonProperties properties) {
        String hiveHdfsPathPrefix = properties.getProperty(CommonProperties.HIVE_HDFSPATH_PREFIX, "hdfs://ns3");
        String hdfsPathPrefix = properties.getProperty(CommonProperties.HDFS_PATH_PREFIX, "/user/");
        return hiveHdfsPathPrefix + hdfsPathPrefix + useName + "/";
    }

    /**
     * Splits a comma-separated list of Hive table names.
     *
     * @param hiveTbls comma-separated table names, e.g. "t1,t2"
     * @return fixed-size list view of the split names (do not add/remove elements)
     */
    public static List<String> getHiveTables(String hiveTbls) {
        String[] tbls = hiveTbls.split(",");
        return Arrays.asList(tbls);
    }

    /**
     * Builds a Sentry role name from the template
     * {@code role_@item@_@principle@_@path@}.
     *
     * @param item             role category (e.g. "db", "tb")
     * @param principle        privilege type (e.g. "all", "select")
     * @param path             role-specific path/description
     * @param commonProperties source of the {@code HIVE_SENTRY_ROLE_TEMPLET} setting
     * @return the expanded role name, or "" if any argument is null/blank
     */
    public static String getHiveRole(String item, String principle, String path, CommonProperties commonProperties) {
        // Treat null the same as blank instead of throwing NPE from trim().
        if (item == null || principle == null || path == null
                || item.trim().length() == 0
                || principle.trim().length() == 0
                || path.trim().length() == 0) {
            return "";
        }

        String template = commonProperties.getProperty(
                CommonProperties.HIVE_SENTRY_ROLE_TEMPLET, "role_@item@_@principle@_@path@");

        // Role category (db / tb).
        template = template.replace("@item@", item);
        // Privilege type (all / select / ...).
        template = template.replace("@principle@", principle);
        // Concrete role description.
        template = template.replace("@path@", path);

        return template;
    }

    /**
     * Returns a new list containing a freshly created {@link LinuxMachine}
     * (initialized against {@code context}) for every machine in
     * {@code originalList} whose type matches {@code machineType}.
     *
     * @param originalList machines to filter
     * @param context      cluster context used to initialize each copy
     * @param machineType  type to select
     * @return new list of newly constructed machines (never null)
     */
    public static List<LinuxMachine> getMachineListByType(List<LinuxMachine> originalList,
            ClusterContext context, LinuxMachine.MachineType machineType) {
        List<LinuxMachine> linuxMachines = new ArrayList<LinuxMachine>();

        for (LinuxMachine machine : originalList) {
            if (machine.getMachineType() == machineType) {
                linuxMachines.add(createLinuxMachine(machine, context));
            }
        }

        return linuxMachines;
    }

    /**
     * Merges duplicate machine entries: when a single machine appears several
     * times (once per role it plays), collapse them into one entry per machine
     * carrying the full list of role types. Every returned machine is a NEW
     * object so that later steps never share state with the input list.
     *
     * @param machineList possibly-duplicated machine entries
     * @param context     cluster context used to initialize each merged machine
     * @return one freshly created machine per distinct {@code getMachineSingle()} key
     */
    public static List<LinuxMachine> megerMachineOperation(List<LinuxMachine> machineList, ClusterContext context) {
        // Distinct machines keyed by their unique identifier.
        Map<String, LinuxMachine> distinctMachine = new HashMap<String, LinuxMachine>();

        for (LinuxMachine linuxMachine : machineList) {
            String machineId = linuxMachine.getMachineSingle();
            LinuxMachine tmpMachine = distinctMachine.get(machineId);
            // First time we see this machine: create a fresh copy.
            if (tmpMachine == null) {
                tmpMachine = createLinuxMachine(linuxMachine, context);
            }
            // Accumulate the role types of every duplicate entry on the merged copy.
            List<LinuxMachine.MachineType> typeList = tmpMachine.getMachineRoleTypes();
            if (typeList == null) {
                typeList = new ArrayList<LinuxMachine.MachineType>();
                tmpMachine.setMachineRoleTypes(typeList);
            }
            typeList.add(linuxMachine.getMachineType());

            distinctMachine.put(machineId, tmpMachine);
        }

        // Flatten the map into a list for easier consumption.
        List<LinuxMachine> finalMachineList = new ArrayList<LinuxMachine>(distinctMachine.values());
        return finalMachineList;
    }

    /**
     * Creates a fresh {@link LinuxMachine} copying only the SSH credentials and
     * machine type of {@code machine}, initialized against {@code context}.
     */
    private static LinuxMachine createLinuxMachine(LinuxMachine machine, ClusterContext context) {
        LinuxMachine tmpMachine = new LinuxMachine();
        tmpMachine.setSshAuthor(machine.getSshAuthor());
        tmpMachine.setMachineType(machine.getMachineType());
        tmpMachine.initLinuxContext(context);
        return tmpMachine;
    }

    /**
     * Inserts the current timestamp into a file name, before the extension.
     * <p>
     * "log.txt" → "log20160310153045.txt"; "log" → "log20160310153045".
     * Only the LAST dot is treated as the extension separator, so multi-dot
     * names such as "a.tar.gz" receive a single timestamp (the previous
     * implementation stamped every dot).
     *
     * @param fileName original file name
     * @return file name with the timestamp inserted
     */
    public static String getTimeFileName(String fileName) {
        int dot = fileName.lastIndexOf('.');
        if (dot >= 0) {
            return fileName.substring(0, dot) + getCurrentDate() + fileName.substring(dot);
        }
        return fileName + getCurrentDate();
    }

    /**
     * Returns the current time formatted as yyyyMMddHHmmss.
     */
    public static String getCurrentDate() {
        return newTimestampFormat().format(System.currentTimeMillis());
    }

    /**
     * Downloads a file from a remote machine to the local server via SCP.
     *
     * @param fromSSHAuthor credentials of the remote machine
     * @param fromPath      remote source path
     * @param toPath        local destination path
     * @return the executor result of the SCP transfer
     */
    public static ExecutorResult downLoadFileToLocal(SSHAuthor fromSSHAuthor, String fromPath, String toPath) {
        SCPFileExecutor scpFileExecutor = new SCPFileExecutor(fromSSHAuthor);
        scpFileExecutor.setSCP_DIRECTION(SCPFileExecutor.DOWNLOAD);
        return scpFileExecutor.exec(new LinuxCommand().initFromPath(fromPath).initToPath(toPath));
    }

    /**
     * Uploads a local file to a remote machine via SCP.
     *
     * @param toSSHAuthor credentials of the remote machine
     * @param fromPath    local source path
     * @param toPath      remote destination path
     * @return the executor result of the SCP transfer
     */
    public static ExecutorResult upLoadLocalFile(SSHAuthor toSSHAuthor, String fromPath, String toPath) {
        SCPFileExecutor scpFileExecutor = new SCPFileExecutor(toSSHAuthor);
        scpFileExecutor.setSCP_DIRECTION(SCPFileExecutor.UPLOAD);
        return scpFileExecutor.exec(new LinuxCommand().initFromPath(fromPath).initToPath(toPath));
    }

    /**
     * Builds an SSH credential object.
     * <p>
     * NOTE(review): this is the only instance method in an otherwise all-static
     * utility class; kept non-static for binary compatibility with existing callers.
     *
     * @param host   target host
     * @param user   login user
     * @param passwd login password
     * @param pubKey public key token
     * @return a populated {@link SSHAuthor}
     */
    public SSHAuthor getOneSSHAuth(String host, String user, String passwd, String pubKey) {
        SSHAuthor tmpSShAuth = new SSHAuthor();
        tmpSShAuth.setHost(host);
        tmpSShAuth.setUsername(user);
        tmpSShAuth.setPasswd(passwd);
        tmpSShAuth.setPubToken(pubKey);
        return tmpSShAuth;
    }

    /**
     * Formats the given epoch-millisecond timestamp as yyyyMMddHHmmss.
     * <p>
     * Fixed: the previous implementation ignored {@code sysTime} and always
     * formatted the current time.
     *
     * @param sysTime epoch milliseconds to format
     * @return formatted timestamp
     */
    public static String getFormateTime(long sysTime) {
        return newTimestampFormat().format(sysTime);
    }

    /**
     * Builds the HDFS info file name: configured prefix + current timestamp + ".txt".
     *
     * @return e.g. "hdfsinfo20160310153045.txt"
     */
    public static String getHDFSInfoFileName() {
        return PropertyBox.getVal("HDFS_INFO_FILENAME", "") + getCurrentDate() + ".txt";
    }

    /**
     * Reads a text file into a list of lines (platform default charset,
     * matching the original behavior).
     *
     * @param path path of the file to read
     * @return list of lines (possibly empty), or {@code null} when the file
     *         does not exist — kept so existing callers can distinguish a
     *         missing file from an empty one
     */
    public static List<String> readFile2List(String path) {
        List<String> txtList = new ArrayList<String>();
        File file = new File(path);
        if (!file.exists()) {
            logger.info("Path[" + path + "] does not exist!");
            return null;
        }
        BufferedReader reader = null;
        try {
            reader = new BufferedReader(new FileReader(file));
            String line;
            while ((line = reader.readLine()) != null) {
                txtList.add(line);
            }
        } catch (IOException e) {
            // Use the class logger instead of printStackTrace so failures
            // show up in the application log with context.
            logger.error("Failed to read file [" + path + "]", e);
        } finally {
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    logger.warn("Failed to close reader for [" + path + "]", e);
                }
            }
        }
        return txtList;
    }

    /**
     * Quotes a delimiter string, escaping a trailing tab or newline.
     * <p>
     * NOTE(review): the original implementation looped over every character but
     * overwrote the result on each iteration, so the outcome depends only on
     * the LAST character of {@code val}. That behavior is preserved exactly
     * here; confirm whether the intent was to inspect a single-character
     * delimiter only.
     *
     * @param val value to quote (typically a field delimiter)
     * @return "'\t'" if the last char is a tab, "'\n'" if a newline,
     *         otherwise {@code val} wrapped in single quotes; "" for empty input
     */
    public static String convertString(String val) {
        int valLen = val.length();
        if (valLen == 0) {
            return "";
        }
        char last = val.charAt(valLen - 1);
        if (last == 9) {
            return "'\\t'";
        } else if (last == 10) {
            return "'\\n'";
        }
        return "'" + val + "'";
    }
}
