package com.flute.icrawler.app.distributed;

import java.io.File;
import java.util.Properties;

import com.flute.haflute.client.ClientJobTracer;
import com.flute.haflute.client.Constants;
import com.flute.haflute.jobbox.base.CloudJobConf;

/**
 * Submits distributed crawler jobs to the cluster and tracks the most
 * recently submitted job's completion state.
 */
public class CrawlerJobSubmitter {
	// Non-null only while a successfully submitted job is being tracked;
	// reset to null on a failed submit.
	private ClientJobTracer tracer;
	private CloudJobConf jobConf;

	/**
	 * Builds a job configuration and submits it to the cluster.
	 *
	 * @param nodesNeed number of nodes the job should run on (task count)
	 * @param scalable whether the node count may float, i.e. the job may
	 *        still execute when fewer than {@code nodesNeed} nodes are
	 *        available
	 * @param files files to ship with the job; {@code .jar}/{@code .class}
	 *        entries are currently ignored (loading not yet implemented),
	 *        other entries are added to the job context only if they exist
	 *        on the local filesystem
	 * @return {@code true} if the job was submitted successfully,
	 *         {@code false} if submission failed or threw an exception
	 */
	public boolean submitCrawlerJob(int nodesNeed, boolean scalable, String... files) {
		jobConf = new CloudJobConf();
		jobConf.setTaskRunner(DistributedJobController4Child.class);
		jobConf.setCenterJobController(DistributedJobController4Server.class);
		jobConf.setTasksNeeded(nodesNeed);
		jobConf.setScalable(scalable);

		if (files != null) {
			Properties props = new Properties();
			for (String file : files) {
				if (file.endsWith(".jar") || file.endsWith(".class")) {
					// TODO: load jar or class files — not yet implemented
				} else {
					File configFile = new File(file);
					// Nonexistent files are silently skipped.
					if (configFile.exists()) {
						props.setProperty(Constants.FILEPREFIX + configFile.getName(), configFile.getAbsolutePath());
					}
				}
			}
			// Context is set even for an empty file list, matching callers
			// that rely on a (possibly empty) Properties being present.
			jobConf.setJobContext(props);
		}

		try {
			tracer = new ClientJobTracer();
			tracer.submitJob(jobConf);
			boolean submitOk = tracer.awaitSubmitSuccessUninterruptly();
			if (submitOk) {
				System.out.println("Job successfully submitted");
			} else {
				System.out.println("Job submit FAILED, please see details");
				// Drop the tracer so isFinished() reports "no job submitted".
				tracer = null;
			}
			return submitOk;
		} catch (Exception e) {
			e.printStackTrace();
			// BUG FIX: the original fell through to "return true" after an
			// exception, falsely reporting success. Report failure and clear
			// the tracer instead.
			tracer = null;
			return false;
		}
	}

	/**
	 * @return whether the submitted job has finished executing
	 * @throws Exception if no job has been successfully submitted yet
	 */
	public boolean isFinished() throws Exception {
		if (tracer == null) {
			throw new Exception("You didn't submit a job yet!");
		}
		return tracer.isJobFinished();
	}
}
