package com.gdzl_hadoop.service.Impl;

import com.gdzl_hadoop.common.ConnectToLinux;
import com.gdzl_hadoop.common.ServerResponse;
import com.gdzl_hadoop.service.IExecuteAlgorithmService;
import com.gdzl_hadoop.util.DbfUtil;
import com.gdzl_hadoop.util.PropertiesUtil;
import org.springframework.stereotype.Service;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

/**
 * Created by 1032019725 on 2017/10/31.
 *
 * Runs data-mining algorithms (FP-Growth, HLXFX, RandomForest) on a remote
 * Hadoop cluster by executing shell commands over an SSH connection
 * ({@link ConnectToLinux}). Connection parameters and command templates are
 * read from the application properties via {@link PropertiesUtil}.
 */
@Service("iExecuteAlgorithmService")
public class ExecuteAlgorithmServiceImpl implements IExecuteAlgorithmService {

    /**
     * Sentinel string that ConnectToLinux.executeShell returns on a
     * connection/execution failure.
     * NOTE(review): assumed from usage here — confirm against ConnectToLinux.
     */
    private static final String EXEC_ERROR = "error";

    /**
     * Builds a connection to the Hadoop master node using the ip/user/password
     * configured in the application properties.
     */
    private ConnectToLinux newMasterConnection() {
        return new ConnectToLinux(
                PropertiesUtil.getProperty("hadoop.server.master.ip"),
                PropertiesUtil.getProperty("hadoop.server.master.user"),
                PropertiesUtil.getProperty("hadoop.server.master.password"));
    }

    /**
     * True when the shell result indicates failure.
     * BUG FIX: the original compared with {@code message == "error"}, which
     * tests reference identity and is effectively never true for a string
     * built at runtime — the error branch was dead code.
     */
    private static boolean isExecError(String message) {
        return EXEC_ERROR.equals(message);
    }

    /**
     * Executes the FP-Growth algorithm on the cluster for the given task guid.
     *
     * @param guid task identifier, substituted into the command template
     *             (the template expects it in five positions)
     * @return success, or an error response when the shell reports a failure
     */
    @Override
    public ServerResponse<String> executeFPG(String guid) {
        String message = newMasterConnection().executeShell(
                String.format(PropertiesUtil.getProperty("hadoop.server.command.fpg"),
                        guid, guid, guid, guid, guid));
        if (isExecError(message)) {
            return ServerResponse.createByErrorMessage("连接Hadoop服务器错误");
        }
        return ServerResponse.createBySuccess();
    }

    /**
     * Executes the HLXFX algorithm on the cluster for the given task guid.
     *
     * @param guid task identifier appended to the configured command
     * @return success, or an error response when the shell reports a failure
     */
    public ServerResponse<String> executeHLXFX(String guid) {
        String message = newMasterConnection().executeShell(
                PropertiesUtil.getProperty("hadoop.server.command.HLXFX.execute") + " " + guid);
        if (isExecError(message)) {
            return ServerResponse.createByErrorMessage("连接Hadoop服务器错误");
        }
        return ServerResponse.createBySuccess();
    }

    /**
     * Executes the RandomForest algorithm on the cluster for the given task guid.
     *
     * @param guid task identifier, substituted into the command template
     *             (the template expects it in ten positions)
     * @return success, or an error response when the shell reports a failure
     */
    public ServerResponse<String> executeRandomForest(String guid) {
        String message = newMasterConnection().executeShell(
                String.format(PropertiesUtil.getProperty("hadoop.server.command.RandomForest.execute"),
                        guid, guid, guid, guid, guid, guid, guid, guid, guid, guid));
        if (isExecError(message)) {
            return ServerResponse.createByErrorMessage("连接Hadoop服务器错误");
        }
        return ServerResponse.createBySuccess();
    }

    /**
     * Reads transactions from {@code XJFDDY.dbf} under {@code pathRoot}, writes
     * them to a temporary {@code .dat} file, uploads it to HDFS and launches the
     * FP-Growth job on the cluster.
     *
     * @param year      year component of the task id
     * @param xzqdm     administrative-division code component of the task id
     * @param minSupPro relative minimum support (0..1); converted to an absolute
     *                  count by multiplying with the transaction count
     * @param pathRoot  local directory containing the DBF input file
     * @return success message with the minimum support, or an error response
     *         when the upload or the remote command fails
     */
    @Override
    public ServerResponse<String> executeFPG(String year, String xzqdm, double minSupPro, String pathRoot) {
        File dbfFile = new File(pathRoot + File.separator + "XJFDDY.dbf");
        StringBuffer sb = DbfUtil.readXJFDDY(dbfFile);
        // Absolute minimum support = relative support * number of transactions.
        int minSupport = (int) (minSupPro * DbfUtil.countTrans(dbfFile));
        try {
            File tempFile = File.createTempFile(year + xzqdm, ".dat", new File(pathRoot));
            // try-with-resources closes the writer even when write() throws
            // (the original leaked the stream on failure). Always write UTF-8
            // rather than the platform default encoding.
            try (OutputStreamWriter writer =
                         new OutputStreamWriter(new FileOutputStream(tempFile), StandardCharsets.UTF_8)) {
                writer.write(sb.toString());
            }

            ConnectToLinux ct = newMasterConnection();
            ct.login();
            ct.uploadFileToHdfs(tempFile.getAbsolutePath(), tempFile.getName(),
                    PropertiesUtil.getProperty("hadoop.server.remoteDir"),
                    PropertiesUtil.getProperty("hadoop.server.hdfs"));
            String guid = year + xzqdm;
            String message = ct.executeShell(String.format(
                    PropertiesUtil.getProperty("hadoop.local.fpg.execute"),
                    tempFile.getName(), guid, minSupport, guid, guid, guid, guid, guid, guid, guid));
            // File is only needed until the upload; clean it up when the JVM exits.
            tempFile.deleteOnExit();
            if (isExecError(message)) {
                return ServerResponse.createByErrorMessage("连接Hadoop服务器错误");
            }
        } catch (IOException e) {
            // BUG FIX: the original swallowed the exception and still returned
            // success; report the failure to the caller instead.
            e.printStackTrace();
            return ServerResponse.createByErrorMessage("创建临时文件失败");
        }
        return ServerResponse.createBySuccessMessage("最小支持度:" + minSupPro);
    }

}
