package com.flute.icrawler.app.task.entity;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.InvalidPropertiesFormatException;
import java.util.Properties;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.flute.icrawler.app.distributed.CrawlerJobSubmitter;
import com.flute.icrawler.app.task.ITaskState;

public class CrawlTask {

	private static final Logger LOGGER = LoggerFactory
			.getLogger(CrawlTask.class);

	public static final String CONFIG_FILENAME = "crawler.xml";
	public static final String INFO_FILENAME = "taskinfo.xml";
	public static final String TASK_LIST = "TASKLIST";

	public static final String TASK_DES_INTERVAL = "interval";

	public static final String TASK_DES_NODECOUNT = "nodeCount";

	public static final String TASK_DES_SCALABLE = "scalable";

	public static final String TASK_DES_FINISHTIME = "lastFinishTime";

	public static final ITaskState STATE_FINISH = new FinishedTaskState();

	public static final ITaskState STATE_WAITING = new WaitingTaskState();

	public static final ITaskState STATE_RUNNING = new RunningTaskState();

	// Path of the crawler configuration file for this task.
	private String crawlCfgFile = "";

	// Path of the task description (info) file.
	private String infoFileName = "";

	private long ID = 0;
	private int submitCount = 0;
	private String name = "";
	private ITaskState taskState = null;
	// Last-seen modification time of the info file; used to skip re-parsing
	// when the file has not changed since the previous load.
	private long lastModifyTime = 0;

	/**
	 * Crawl tasks come in two kinds: run-once tasks and periodic tasks.
	 * A non-zero period indicates a periodic task.
	 */
	private long period = 0;
	private CrawlerJobSubmitter crawlerJobSubmitter = new CrawlerJobSubmitter();

	// Task properties loaded from the info file.
	private Properties properties = new Properties();

	public ITaskState getTaskState() {
		return taskState;
	}

	public void setTaskState(ITaskState state) {
		// Ignore null and no-op transitions.
		if (null != state && taskState != state) {
			this.taskState = state;
		}
	}

	/**
	 * Creates a new task, deriving the configuration and description file
	 * paths from the task name, then loads the task description file.
	 *
	 * @param taskName
	 *            the task name (also the task's directory under TASKLIST)
	 */
	public CrawlTask(String taskName) {
		this.name = taskName;
		ID = System.currentTimeMillis();
		// Crawler configuration file path: TASKLIST/<name>/crawler.xml
		this.crawlCfgFile = CrawlTask.TASK_LIST + File.separator + taskName
				+ File.separator + CrawlTask.CONFIG_FILENAME;
		LOGGER.info("CrawlTask:Config file Path={}", crawlCfgFile);
		// Task description file path: TASKLIST/<name>/taskinfo.xml
		this.infoFileName = CrawlTask.TASK_LIST + File.separator + taskName
				+ File.separator + CrawlTask.INFO_FILENAME;

		// Load the task description file.
		loadTaskInfo();
	}

	/**
	 * Loads the task info (XML properties) file. Returns immediately when the
	 * file's modification time has not changed since the last successful load.
	 */
	public void loadTaskInfo() {
		File file = new File(infoFileName);
		// lastModified() returns 0 for a missing file, matching the initial
		// cache value, so a missing file is skipped without an exception.
		long modifyTime = file.lastModified();
		if (lastModifyTime == modifyTime) {
			return;
		}
		// try-with-resources guarantees the stream is closed (the original
		// leaked it). Parse into a fresh Properties first so a malformed file
		// does not wipe the previously loaded values.
		try (FileInputStream inputStream = new FileInputStream(infoFileName)) {
			Properties loaded = new Properties();
			loaded.loadFromXML(inputStream);
			properties = loaded;
			// Record the mtime only after a successful parse so the
			// unchanged-file shortcut above actually takes effect.
			lastModifyTime = modifyTime;
		} catch (IOException e) {
			// Covers FileNotFoundException and InvalidPropertiesFormatException.
			LOGGER.error("Failed to load task info file {}", infoFileName, e);
		}
	}

	/**
	 * Submits the crawler job using the node count and scalable flag taken
	 * from the task properties.
	 *
	 * @return true if the job was submitted successfully
	 */
	public boolean submitCrawlerJob() {
		submitCount++;
		return crawlerJobSubmitter.submitCrawlerJob(getStartNodeCount(),
				getScalable(), crawlCfgFile);
	}

	/**
	 * Submits the crawler job with explicit parameters.
	 *
	 * @param nodesNeed
	 *            number of nodes to start; -1 means allocate randomly
	 * @param scalable
	 *            whether the job may start with fewer nodes than requested
	 * @return true if the job was submitted successfully
	 */
	public boolean submitCrawlerJob(int nodesNeed, boolean scalable) {
		submitCount++;
		return crawlerJobSubmitter.submitCrawlerJob(nodesNeed, scalable,
				crawlCfgFile);
	}

	/**
	 * Checks whether the submitted crawler job has finished.
	 *
	 * @return true if finished; false when still running or on query failure
	 */
	public boolean isFinished() {
		try {
			return crawlerJobSubmitter.isFinished();
		} catch (Exception e) {
			// Treat a failed status query as "not finished" so the task is
			// re-checked later rather than prematurely marked done.
			LOGGER.error("Failed to query job status for task {}", name, e);
			return false;
		}
	}

	/**
	 * Re-evaluates the task state: a run-once task that has a recorded finish
	 * time, or a running job that the submitter reports finished, transitions
	 * to FINISHED; everything else waits.
	 *
	 * @throws Exception
	 *             if the job status query fails
	 */
	public void checkState() throws Exception {
		if ((0 == getIntervalDay() && getFinishTime() != 0)
				|| (taskState == STATE_RUNNING && crawlerJobSubmitter
						.isFinished())) {
			LOGGER.info("crawlTask is finished Name={}", name);
			taskState = STATE_FINISH;
		} else {
			// Includes periodic tasks whose next run time has not yet arrived.
			taskState = STATE_WAITING;
		}
	}

	/**
	 * Records the current time as the task's finish time and persists it to
	 * the task info file.
	 */
	public void setFinished() {
		properties.setProperty(CrawlTask.TASK_DES_FINISHTIME,
				"" + System.currentTimeMillis());
		storeProperty();
	}

	/**
	 * Returns the task period in milliseconds (0 for run-once tasks).
	 *
	 * @return the period
	 */
	public long getPeriod() {
		return period;
	}

	/**
	 * Returns how many times this task has been submitted.
	 *
	 * @return the submission count
	 */
	public int getSubmitCount() {
		return submitCount;
	}

	public long getTaskID() {
		return ID;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	/**
	 * Returns the number of nodes to start the job with.
	 *
	 * @return the configured node count, or -1 (random allocation) when the
	 *         property is absent or malformed
	 */
	public int getStartNodeCount() {
		String startNodeCount = properties
				.getProperty(CrawlTask.TASK_DES_NODECOUNT);
		if (null == startNodeCount) {
			return -1;
		}
		try {
			return Integer.parseInt(startNodeCount.trim());
		} catch (NumberFormatException e) {
			// Fall back to random allocation instead of failing the submit.
			LOGGER.error("Invalid {} property value '{}' for task {}",
					CrawlTask.TASK_DES_NODECOUNT, startNodeCount, name);
			return -1;
		}
	}

	/**
	 * Sets the number of nodes to start the job with and persists it.
	 *
	 * @param startNodeCount
	 *            node count; -1 means allocate randomly
	 */
	public void setStartNodeCount(int startNodeCount) {
		properties.setProperty(CrawlTask.TASK_DES_NODECOUNT,
				String.valueOf(startNodeCount));
		storeProperty();
	}

	/**
	 * Returns the task's last finish time in milliseconds since the epoch.
	 *
	 * @return the finish time, or 0 when the task has never finished
	 */
	public long getFinishTime() {
		String strFinishTime = properties
				.getProperty(CrawlTask.TASK_DES_FINISHTIME);
		long finishTime = 0;

		if (null != strFinishTime && !"".equalsIgnoreCase(strFinishTime)) {
			finishTime = Long.parseLong(strFinishTime);
		}

		return finishTime;
	}

	/**
	 * Returns the task interval in days.
	 *
	 * @return the interval, or 0 for a run-once task
	 */
	public int getIntervalDay() {
		String strInterval = properties
				.getProperty(CrawlTask.TASK_DES_INTERVAL);
		int interval = 0;

		if (null != strInterval && !"".equalsIgnoreCase(strInterval)) {
			interval = Integer.parseInt(strInterval);
		}

		return interval;
	}

	/**
	 * Returns the task interval in milliseconds.
	 *
	 * @return the interval, or 0 for a run-once task
	 */
	public long getInterval() {
		// Delegate to getIntervalDay() so the property is parsed in one place.
		return getIntervalDay() * 24L * 60 * 60 * 1000;
	}

	/**
	 * Tells whether the job may start with fewer nodes than requested instead
	 * of waiting when the cluster is short on nodes.
	 *
	 * @return true when the "scalable" property is "yes" (case-insensitive)
	 */
	public boolean getScalable() {
		String scalable = properties.getProperty(CrawlTask.TASK_DES_SCALABLE);
		// equalsIgnoreCase is null-safe for its argument.
		return "yes".equalsIgnoreCase(scalable);
	}

	/**
	 * Persists the task properties to the info file as XML.
	 */
	private void storeProperty() {
		// try-with-resources guarantees the stream is closed (the original
		// leaked it).
		try (FileOutputStream outputStream = new FileOutputStream(infoFileName)) {
			properties.storeToXML(outputStream, "");
		} catch (IOException e) {
			LOGGER.error("Failed to store task info file {}", infoFileName, e);
		}
	}

	public static void main(String[] args) {
		CrawlTask crawlTask = new CrawlTask("qq");
		crawlTask.loadTaskInfo();
	}
}
