package cn.dceast.common.utils;

import com.jcraft.jsch.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.*;
import java.net.URI;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Map;

/**
 * Created by yang_ on 2017/6/23.
 */
public class ShellUtils {
    private static String charset = "UTF-8"; // charset used to decode remote command output
    private static String user;   // NOTE(review): unused — credentials are passed per call; consider removing
    private static String passwd; // NOTE(review): unused
    private static String host;   // NOTE(review): unused
    private static int port = 22; // SSH port
    private static JSch jsch;
    // Shared session established by connect(); mutable static state — NOT thread-safe.
    private static Session session;


    /**
     * Opens an SSH session to {@code host} on port 22 and stores it in the
     * static {@code session} field for use by the other methods.
     *
     * @param user   SSH login name
     * @param passwd SSH password
     * @param host   target host IP / name
     * @throws JSchException if the session cannot be established
     */
    public static void connect(String user, String passwd, String host) throws JSchException {
        jsch = new JSch();
        session = jsch.getSession(user, host, port);
        session.setPassword(passwd);

        java.util.Properties config = new java.util.Properties();
        // Skips host-key verification. Convenient, but open to MITM attacks —
        // NOTE(review): confirm this is acceptable for the target environment.
        config.put("StrictHostKeyChecking", "no");
        session.setConfig(config);

        session.connect();
    }

    /**
     * Connects to {@code host} and executes a single remote command via an
     * "exec" channel, echoing its stdout to {@code System.out} and its stderr
     * to {@code System.err}. The session is disconnected afterwards.
     *
     * @param command remote command to run (ignored when {@code null})
     * @param user    SSH login name
     * @param passwd  SSH password
     * @param host    target host
     * @throws JSchException if connecting the session fails
     */
    public static void execCmd(String command, String user, String passwd, String host) throws JSchException {
        connect(user, passwd, host);

        BufferedReader reader = null;
        Channel channel = null;

        try {
            // BUG FIX: was `while (command != null)` — `command` is never reassigned,
            // so the loop re-executed the same command forever and leaked a channel
            // per iteration. The command is executed exactly once.
            if (command != null) {
                channel = session.openChannel("exec");
                ((ChannelExec) channel).setCommand(command);

                channel.setInputStream(null);
                ((ChannelExec) channel).setErrStream(System.err);

                channel.connect();
                InputStream in = channel.getInputStream();
                // Use the configured charset (consistent with the no-arg execCmd()).
                reader = new BufferedReader(new InputStreamReader(in, Charset.forName(charset)));
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line);
                }
            }
        } catch (IOException | JSchException e) {
            e.printStackTrace();
        } finally {
            // BUG FIX: null-guards — `reader`/`channel` are null when connect() or
            // openChannel() fails, and the old finally block threw NPE here.
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if (channel != null) {
                channel.disconnect();
            }
            if (session != null) {
                session.disconnect();
            }
        }
    }

    /**
     * Interactive loop: reads commands from {@code System.in} (one per line,
     * until EOF) and executes each on the already-connected {@code session}
     * via a fresh "exec" channel, echoing output to {@code System.out}.
     * Requires {@link #connect} to have been called first.
     */
    public static void execCmd() {
        BufferedReader console = new BufferedReader(new InputStreamReader(System.in));

        String command;
        BufferedReader reader = null;
        Channel channel = null;

        try {
            while ((command = console.readLine()) != null) {
                channel = session.openChannel("exec");
                ((ChannelExec) channel).setCommand(command);
                channel.setInputStream(null);
                ((ChannelExec) channel).setErrStream(System.err);

                channel.connect();
                InputStream in = channel.getInputStream();
                reader = new BufferedReader(new InputStreamReader(in,
                        Charset.forName(charset)));
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line);
                }
                // BUG FIX: release per-command resources inside the loop instead of
                // leaking one reader/channel per executed command.
                reader.close();
                channel.disconnect();
            }
        } catch (IOException | JSchException e) {
            e.printStackTrace();
        } finally {
            // Null-guards: both are null if the first openChannel() fails.
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if (channel != null) {
                channel.disconnect();
            }
        }
    }

    /**
     * Opens an interactive "shell" channel on the connected session, changes
     * into the Spark installation directory, runs {@code m + sf} (typically a
     * spark-submit command line) and prints everything the remote shell emits.
     *
     * @param m  command prefix (e.g. "bin/spark-submit ... --class ")
     * @param sf command suffix (main class, jar and program arguments)
     */
    public static void doShell(String m, String sf) {
        ChannelShell channel = null;
        InputStream inputStream = null;
        OutputStream outputStream = null;
        BufferedReader in = null;
        PrintWriter printWriter = null;
        try {
            channel = (ChannelShell) session.openChannel("shell");
            channel.connect();
            inputStream = channel.getInputStream();
            outputStream = channel.getOutputStream();
            printWriter = new PrintWriter(outputStream);
            // NOTE(review): hardcoded remote path — "/user/local" looks like a typo
            // for "/usr/local"; confirm against the actual server layout.
            String cmd2 = "cd /user/local/spark/spark-2.1.1-bin-hadoop2.7/";
            printWriter.println(cmd2);
            String cmd3 = m + sf;
            printWriter.println(cmd3);
            // "exit" ends the remote shell so the read loop below sees EOF
            // instead of blocking forever.
            printWriter.println("exit");
            printWriter.flush();
            in = new BufferedReader(new InputStreamReader(inputStream));

            String msg;
            while ((msg = in.readLine()) != null) {
                System.out.println(msg);
            }
        } catch (JSchException | IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if (in != null) {
                    in.close();
                }
                if (inputStream != null) {
                    inputStream.close();
                }
                if (outputStream != null) {
                    outputStream.close();
                }
                if (printWriter != null) {
                    printWriter.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
            // BUG FIX: the shell channel was never disconnected.
            if (channel != null) {
                channel.disconnect();
            }
        }
    }

    /**
     * Uploads a local file to the remote host over SFTP, overwriting any
     * existing remote file, then disconnects the SFTP channel and session.
     * Requires {@link #connect} to have been called first.
     *
     * @param src local source file path
     * @param dst remote destination file path
     */
    public static void uploadFile(String src, String dst) {
        ChannelSftp chSftp = null;
        Channel channel = null;
        try {
            channel = session.openChannel("sftp");
            channel.connect();
            chSftp = (ChannelSftp) channel;
            OutputStream out = chSftp.put(dst, ChannelSftp.OVERWRITE); // overwrite remote file
            if (out != null) {
                System.out.println("Start to read input stream");
                // BUG FIX: the FileInputStream was never closed — try-with-resources
                // guarantees release even when the copy loop throws.
                try (InputStream is = new FileInputStream(src)) {
                    byte[] buff = new byte[1024 * 256]; // 256 KB copy buffer
                    int read;
                    do {
                        read = is.read(buff, 0, buff.length);
                        if (read > 0) {
                            out.write(buff, 0, read);
                        }
                        out.flush();
                    } while (read >= 0);
                } finally {
                    out.close(); // finalize the SFTP transfer
                }
                System.out.println("input stream read done.");
            }
            chSftp.quit();
        } catch (JSchException | SftpException | IOException e) {
            // FileNotFoundException is covered by IOException.
            e.printStackTrace();
        } finally {
            // BUG FIX: previously skipped on any exception, leaking the channel
            // and leaving the session connected.
            if (chSftp != null) {
                chSftp.disconnect();
            }
            if (channel != null) {
                channel.disconnect();
            }
            if (session != null) {
                session.disconnect();
            }
        }
    }

    /**
     * Connects to the Spark host and submits the given MLlib job class/jar
     * string via {@link #doShell}.
     *
     * @param K_MeansMllib spark-submit suffix: main class, jar path and args
     */
    public static void mkShell(String K_MeansMllib) {
        try {
            // SECURITY: root credentials hardcoded in source — move to a
            // configuration file / secret store and rotate this password.
            ShellUtils.connect("root", "4U5g1ZO7jas8Y", "122.114.147.132");
            // Local-mode spark-submit prefix. A yarn-cluster variant and a number
            // of unused example job strings were removed from this method.
            String m = "bin/spark-submit --master local[8] --class ";
            ShellUtils.doShell(m, K_MeansMllib);
        } catch (JSchException e) {
            e.printStackTrace();
        }
    }

    /**
     * Copies an HDFS job-result file ({@code part-00000}) to a local file and
     * returns the local path, or {@code null} if anything fails.
     *
     * @param path   result directory name under {@code /user_<userid>/} on HDFS
     * @param userid user id used to build the HDFS directory name
     * @return the local result file path, or {@code null} on error
     */
    public static String readResult(String path, Integer userid) {
        // NOTE(review): HDFS address and local output path are hardcoded;
        // "/result.txt" requires write access to the filesystem root.
        String dst = "hdfs://122.114.147.132:8020/user_" + userid + "/" + path + "/part-00000";
        String resultPath = "/result.txt";
        try {
            Configuration conf = new Configuration();
            // try-with-resources closes all three streams on every path
            // (the old code leaked them whenever read/write threw).
            try (FileSystem fs = FileSystem.get(URI.create(dst), conf);
                 FSDataInputStream hdfsInStream = fs.open(new Path(dst));
                 OutputStream out = new FileOutputStream(resultPath)) {
                byte[] ioBuffer = new byte[1024];
                int readLen;
                while ((readLen = hdfsInStream.read(ioBuffer)) != -1) {
                    out.write(ioBuffer, 0, readLen);
                }
            }
            return resultPath;
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }
}
