package com.yk.mr.service.manage.impl;

import com.jcraft.jsch.*;
import com.yk.mr.dao.cluster.api.IClusterDao;
import com.yk.mr.model.cluster.ClusterModel;
import com.yk.mr.model.job.JobModel;
import com.yk.mr.model.submit.SubmitOptions;
import com.yk.mr.service.file.api.IFileService;
import com.yk.mr.service.manage.api.IJobSubmitService;
import jdk.nashorn.internal.runtime.regexp.joni.Regex;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.json.JSONException;
import org.json.JSONObject;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;

import static java.lang.System.in;

@Service("jobSubmitService")
public class JobSubmitServiceImpl implements IJobSubmitService {

    @Resource(name = "fileService")
    private IFileService fileService;

    @Resource(name = "clusterDao")
    private IClusterDao clusterDao;

    // Per-instance SSH state. These were previously static, which let concurrent
    // submits on the singleton service clobber each other's sessions. NOTE(review):
    // they are still shared per-instance; truly concurrent use of this bean would
    // need the session made local to execCmd — confirm expected concurrency.
    private JSch jsch;
    private Session session;

    /**
     * Opens an SSH session (port 22, password auth) to the given host and stores it
     * in {@link #session} for use by {@link #execCmd}.
     *
     * @param user   remote login name
     * @param passwd remote login password
     * @param host   target host or IP
     * @throws JSchException if the session cannot be established
     */
    public void connect(String user, String passwd, String host) throws JSchException {
        jsch = new JSch();
        session = jsch.getSession(user, host, 22); // default SSH port
        session.setPassword(passwd);

        java.util.Properties config = new java.util.Properties();
        // Skip host-key verification; acceptable only on a trusted internal network.
        config.put("StrictHostKeyChecking", "no");
        session.setConfig(config);

        session.connect();
    }

    /**
     * Executes a single command on the remote host over an SSH "exec" channel and
     * returns its stdout, each line prefixed with a newline (original format kept).
     *
     * @param command remote command; a {@code null} command returns {@code null}
     * @param user    remote login name
     * @param passwd  remote login password
     * @param host    target host or IP
     * @return captured stdout, or {@code null} if command was null or an I/O /
     *         channel error occurred (errors are printed, not propagated)
     * @throws JSchException if the initial SSH connection fails
     */
    public StringBuffer execCmd(String command, String user, String passwd, String host) throws JSchException {
        // Guard first: the old code fell through to a finally block that called
        // reader.close()/channel.disconnect() on nulls, throwing NPE for null commands.
        if (command == null) {
            return null;
        }

        System.out.println(host);
        connect(user, passwd, host); // remote login
        System.out.println(" Connect Success!");

        Channel channel = null;
        BufferedReader reader = null;
        try {
            channel = session.openChannel("exec");
            System.out.println(command);
            ((ChannelExec) channel).setCommand(command);
            channel.setInputStream(null);
            ((ChannelExec) channel).setErrStream(System.err);
            channel.connect();

            InputStream in = channel.getInputStream();
            reader = new BufferedReader(new InputStreamReader(in));
            StringBuffer info = new StringBuffer();
            String line;
            while ((line = reader.readLine()) != null) {
                info.append("\n").append(line);
            }
            System.out.println(info);
            return info;
        } catch (IOException | JSchException e) {
            // Channel-level failures are swallowed by design (caller gets null);
            // only the initial connect() propagates JSchException.
            e.printStackTrace();
        } finally {
            // Null-checked cleanup: any of these may be unassigned if an earlier
            // step failed.
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if (channel != null) {
                channel.disconnect();
            }
            if (session != null) {
                session.disconnect();
            }
        }
        return null;
    }

    /**
     * Builds a {@code hadoop jar} command line from the job model and runs it on the
     * cluster's master node over SSH.
     *
     * @param model     job description (inputs, output path)
     * @param clusterId id used to look up SSH credentials via {@link IClusterDao}
     * @return status map: always {@code submit=1}; plus {@code success=1} on normal
     *         completion or {@code error=1} if the SSH connection failed
     * @throws JSchException declared for interface compatibility
     * @throws JSONException declared for interface compatibility
     */
    @Override
    public Map<String, Object> submitMRJob(JobModel model, String clusterId) throws JSchException, JSONException {
        // Local result map: the old instance-level field leaked stale "error"/"success"
        // keys between calls and was not thread-safe on this singleton bean.
        Map<String, Object> result = new HashMap<>();

        ClusterModel clusterModel = this.clusterDao.queryByClusterId(clusterId);
        // TODO(review): jar path is hard-coded; presumably should come from
        // model.getJarPath() (see commented-out line in history) — confirm.
        String jarPath = "/hadoop/jars/dedup.jar";
        SubmitOptions submitOptions = new SubmitOptions(model, this.fileService);
        String[] jobInput = submitOptions.getAppArgs();
        String[] jobInputFile = jobInput[1].split(" ");
        String jobOutput = model.getJobOutput();

        try {
            result.put("submit", 1);
            // NOTE(review): command is concatenated from user-supplied paths and run
            // in a remote shell — input/output paths should be validated or quoted
            // upstream to avoid shell injection.
            StringBuilder cmd = new StringBuilder("hadoop jar ").append(jarPath);
            for (String input : jobInputFile) {
                cmd.append(' ').append(input).append(' ');
            }
            cmd.append(jobOutput);
            // Call through `this` instead of a `new JobSubmitServiceImpl()` — the
            // old code created a bare, un-wired instance for no reason.
            execCmd(cmd.toString(), clusterModel.getUserName(), clusterModel.getUserPassword(), clusterModel.getIp());
            result.put("success", 1);
        } catch (JSchException e) {
            result.put("error", 1);
            e.printStackTrace();
        }
        return result;
    }
}
