package portal.engine;

import java.io.IOException;
import java.io.Serializable;
import java.util.*;

import org.springframework.beans.factory.annotation.Autowired;

import com.alibaba.fastjson.JSONObject;

import beans.*;
import portal.beans.*;
import portal.dao.ScheduleJobDetailDao;
import engine.BIEngine0_1.AlgorithmOperator;
import net.CommitJob;
import net.HttpRequest;
//import net.sf.json.JSONObject;
import portal.dao.ScheduleSocketLogDao;
import portal.util.SpringContextUtil;
import utils.FtpuploadUtil;

/**
 * Worker thread with two responsibilities, selected by which constructor was used:
 * <ul>
 *   <li>{@link #JobThread(Job, HttpResult)} — submit and start a new job on the
 *       scheduling server (jobId is null, so {@link #run()} calls startJob()).</li>
 *   <li>the String constructors — server callback path: refresh the persisted
 *       state/log of an already-known job (jobId non-null, so {@link #run()}
 *       calls updateScheduleJobFullState()).</li>
 * </ul>
 * Not thread-safe beyond single-use: each instance is meant to be started once.
 */
public class JobThread extends Thread implements Serializable {
    private static final long serialVersionUID = 6768353726843187465L;

    public String token;
    public String taskHash;
    public String jobId = null;
    public String algorithmHash;
    public String status;

    // NOTE(review): instances are created with `new`, so Spring never injects these
    // fields; the previous @Autowired annotations were ineffective and have been
    // removed. Beans are resolved manually through SpringContextUtil instead.
    public ScheduleJobDetailDao scheduleJobDetailDao = SpringContextUtil.getBean(ScheduleJobDetailDao.class);
    public ScheduleSocketLogDao scheduleSocketLogDao = SpringContextUtil.getBean(ScheduleSocketLogDao.class);
    public FtpuploadUtil ftpuploadUtil = SpringContextUtil.getBean(FtpuploadUtil.class);

    private Job job;
    private HttpResult httpResult;

    /**
     * Submit-and-start path: the job will be uploaded and started when this
     * thread runs. Outcome is reported back through {@code httpResult}.
     *
     * @param job        job definition to submit (command, files, resources)
     * @param httpResult out-parameter receiving success/failure and a message
     */
    public JobThread(Job job, HttpResult httpResult) {
        this.job = job;
        this.httpResult = httpResult;
    }

    /**
     * Server-callback path: when a job finishes, the server notifies us and this
     * thread updates the stored job information. Leaves {@code status} null,
     * which updateScheduleJobFullState() treats as a non-"0" (EXIT) outcome.
     */
    public JobThread(String taskHash, String jobId, String algorithmHash) {
        this.taskHash = taskHash;
        this.jobId = jobId;
        this.algorithmHash = algorithmHash;
    }

    /** Server-callback path with an explicit exit status ("0" = success). */
    public JobThread(String taskHash, String jobId, String algorithmHash, String status) {
        this.taskHash = taskHash;
        this.jobId = jobId;
        this.algorithmHash = algorithmHash;
        this.status = status;
    }

    @Override
    public void run() {
        // jobId is only set by the callback constructors, so it selects the mode.
        if (jobId != null) {
            updateScheduleJobFullState();
        } else {
            startJob();
        }
    }

    /**
     * Uploads the job's files, submits/starts the job on the server, records the
     * running job in schedule_job_detail and socket_log, and enqueues its id.
     * Failures are reported through {@code httpResult}.
     */
    private void startJob() {
        // Obtain an auth token; every subsequent server call needs it, so abort
        // early on failure. (FIX: the original logged the failure but carried on
        // with a null token.)
        HttpResult tokenResult = JobOperator.getToken(HttpConfig.userName, HttpConfig.passWord);
        if (tokenResult.isResult()) {
            this.token = tokenResult.getMessage();
        } else {
            System.out.println("从服务器获取token失败");
            httpResult.setResult(false);
            httpResult.setMessage("从服务器获取token失败");
            return;
        }

        // Upload the job's dependency/executable files via FTP. The remote path is
        // built from HttpConfig.ftpJobUri + a UUID fragment + timestamp + file name.
        // FIX: the original checked `list.equals("")` — a List never equals a
        // String, so the failure branch was unreachable. Check emptiness instead.
        List<String> ftpJobFileUriList = ftpuploadUtil.uploadJobFileByJobUri(job);
        if (ftpJobFileUriList == null || ftpJobFileUriList.isEmpty()) {
            System.out.println("作业依赖文件上传失败");
            httpResult.setResult(false);
            httpResult.setMessage("作业依赖文件上传失败");
            return;
        }

        // Gather submission parameters.
        String cmd = job.getCommand();
        String jobHash = job.getJobHash();
        String userName = HttpConfig.userName;
        String serverPlatform = job.getOsType();
        String priority = job.getJobPriority();
        String gpuType = HttpConfig.gpuType;
        Boolean useGpu = job.getUseGPU();
        String resource = HttpConfig.resource;
        String execNode = HttpConfig.execNode;
        Boolean useMPI = job.getUseMPI();
        String nodeGroup = job.getNodeGroup();
        String strategy = job.getStrategy();
        String cpuNum = job.getCPUkernelNumber();

        // Space-separated list of all uploaded file URIs; ftpFilePath keeps the
        // last one so its parent directory can be recorded below.
        StringBuilder uploadedFiles = new StringBuilder();
        String ftpFilePath = "";
        for (String uri : ftpJobFileUriList) {
            uploadedFiles.append(' ').append(uri);
            ftpFilePath = uri;
        }
        String jh_files = uploadedFiles.toString();

        CommitJob commitJob = new CommitJob(token, cmd, jobHash, jobHash, userName, serverPlatform, priority,
                gpuType, useGpu, resource, execNode, useMPI, nodeGroup, strategy, jh_files, cpuNum);

        // Submit and start the job.
        HttpResult startResult = AlgorithmOperator.startJob(commitJob);
        if (startResult.isResult()) {
            // On success the server returns the new job id in the message field.
            job.setJobId(startResult.getMessage());

            algorithmHash = "uiAlgorithm_1";

            // Query the full state to learn which host the job landed on.
            HttpResultList result = null;
            try {
                result = AlgorithmOperator.getJobFullState(job.getJobId(), token);
            } catch (IOException e) {
                e.printStackTrace();
            }
            // FIX: the original dereferenced `result` unconditionally, so an
            // IOException above led straight into a NullPointerException. Fall back
            // to empty host/usage when the state query failed.
            String executionHost = "";
            String cpuUsage = "";
            if (result != null && !result.getDatas().isEmpty()) {
                FullJobState fullJobState = (FullJobState) result.getDatas().get(0);
                // Host arrives as a JSON-ish list literal, e.g. ["host1"]; strip it.
                executionHost = fullJobState.getExecutionHost().split("]")[0].replaceAll("[\"\\[]", "");
                // Look up the CPU utilisation of the execution host.
                HttpResultList hostList = AlgorithmOperator.getJobBasicState(this.token);
                for (int j = 0; j < hostList.getDatas().size(); j++) {
                    BaseResourceState baseResourceState = (BaseResourceState) hostList.getDatas().get(j);
                    if (baseResourceState.getHostName().equals(executionHost)) {
                        cpuUsage = baseResourceState.getUt();
                    }
                }
            }

            // Record the running job in schedule_job_detail; store only the remote
            // directory of the uploaded files (substring up to the last '/').
            ftpFilePath = ftpFilePath.substring(0, ftpFilePath.lastIndexOf("/") + 1);
            scheduleJobDetailDao.insertScheduleJobDetail(commitJob, job, "RUNNING", ftpFilePath,
                    "1", executionHost, cpuUsage);

            // Record the event in socket_log.
            taskHash = "通过页面启动一个作业--" + job.getJobHash();
            String algorithmState = "2"; // 2 = running
            String subTaskHash = "1";
            scheduleSocketLogDao.insertJobState(HttpConfig.socketLogLevel, algorithmState, taskHash,
                    subTaskHash, job.getJobId());

            httpResult.setResult(true);
            httpResult.setMessage("作业启动成功");

            // Add the job to the in-memory job queue for later monitoring.
            JobQueue.add(job.getJobId());
        } else {
            System.out.println("作业提交启动失败，服务器原因：" + startResult.getMessage());
            httpResult.setResult(false);
            httpResult.setMessage("作业提交启动失败，服务器原因：" + startResult.getMessage());
        }
    }

    /**
     * Callback path: refreshes the persisted state of job {@code jobId} and pulls
     * its log file. A callback status of "0" means the job finished normally and
     * the real final state is fetched from the server; anything else (including a
     * null status from the 3-arg constructor) is recorded as "EXIT".
     */
    private void updateScheduleJobFullState() {
        // FIX: use a null-safe constant-first comparison — `status` is null when
        // the 3-arg constructor was used, and the original status.equals("0") NPE'd.
        if ("0".equals(status)) {
            HttpResult tokenResult = JobOperator.getToken(HttpConfig.userName, HttpConfig.passWord);
            if (tokenResult.isResult()) {
                this.token = tokenResult.getMessage();
            } else {
                System.out.println("从服务器获取token失败");
            }
            // Ask the server for the job's final state; normalise Windows path
            // separators before JSON parsing.
            String callbackResult = HttpRequest.sendGet(HttpConfig.stateQueryUrl + jobId, "token=" + this.token);
            if (callbackResult != null) {
                callbackResult = callbackResult.replace("\\", "/");
            }
            JSONObject result = JSONObject.parseObject(callbackResult);
            HttpResultList list = AlgorithmOperator.pickFullJobState(result, jobId);
            FullJobState fullJobState = (FullJobState) list.getDatas().get(0);
            status = fullJobState.getStatus();
        } else {
            status = "EXIT";
        }

        // Persist the resolved state.
        scheduleJobDetailDao.updateStateById(status, jobId);

        if (HttpConfig.fake) {
            // Test/fake mode: point the log record at a fixed local file.
            scheduleSocketLogDao.updateLogFileByJobId("D:/JCPT_files/socketLog/2018-07-08-16-49-31-时间不用管-测试使用.txt", jobId);
        } else {
            // Look up the remote log file URL, download it to this server, and
            // record the local path in the database.
            String fileUrl = JobOperator.queryLogFileUrl(jobId);
            if (fileUrl != null) {
                String newFilePath = JobOperator.downloadLogFile(fileUrl, jobId);
                if (newFilePath != null) {
                    scheduleSocketLogDao.updateLogFileByJobId(newFilePath, jobId);
                }
            }
        }
    }

}

