package cn.com.infcn.spider.scheduler.http;

import java.io.BufferedReader;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;

import org.nlpcn.commons.lang.util.IOUtil;
import org.nlpcn.commons.lang.util.StringUtil;
import org.nutz.lang.stream.StringReader;

import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;

import cn.com.infcn.spider.SchedulerManager;
import cn.com.infcn.spider.URIManager;
import cn.com.infcn.spider.domain.HttpJob;
import cn.com.infcn.spider.domain.Page;
import cn.com.infcn.spider.domain.RegexDomain;
import cn.com.infcn.spider.domain.UrlInfo;
import cn.com.infcn.spider.io.input.Input;
import cn.com.infcn.spider.io.input.InputFactory;
import cn.com.infcn.spider.io.input.impl.listInput;
import cn.com.infcn.spider.io.output.OutputFactory;
import cn.com.infcn.spider.scheduler.Scheduler;
import cn.com.infcn.spider.util.MongoDao;
import cn.com.infcn.spider.util.StaticValue;

/**
 * Job scheduler for HTTP crawl jobs: seeds the URL queue from the job's
 * inputs, then dispatches one {@code HttpWorker} per popped URL on a
 * bounded worker pool until the queue stays empty.
 * 
 * @author Ansj
 *
 */
public class HttpScheduler extends Scheduler {

	/** Worker pool; created in run(), shut down in shutdown(). */
	private ExecutorService es = null;

	// volatile: written by shutdown() (another thread) and read by the run()
	// loop and isRuning() — without volatile the stop request may never be seen.
	private volatile boolean flag = true;

	/** Consecutive empty polls tolerated before the job is considered drained. */
	private static final int MAX_TIMES = 15;

	/** Extraction rules parsed from the job's regexFilter, one per non-blank line. */
	private final List<RegexDomain> regexList = new ArrayList<>();

	/**
	 * Creates a scheduler for the given HTTP job and eagerly parses its
	 * regex-filter rules.
	 *
	 * @param job the HTTP crawl job to schedule
	 */
	public HttpScheduler(HttpJob job) {
		super(job);
		parseRegexList();
	}

	/**
	 * Parses the job's regexFilter text (one rule per line) into {@link #regexList}.
	 * Blank lines are skipped; a blank or missing filter leaves the list empty.
	 */
	private void parseRegexList() {
		String regexFilter = ((HttpJob) job).getRegexFilter();

		if (StringUtil.isBlank(regexFilter)) {
			return;
		}

		List<String> readFile2List = IOUtil.readFile2List(new BufferedReader(new StringReader(regexFilter)));

		for (String line : readFile2List) {
			if (StringUtil.isBlank(line)) {
				continue;
			}
			regexList.add(new RegexDomain(line));
		}
	}

	/**
	 * Stops the main loop and shuts down the worker pool. Already-submitted
	 * workers are allowed to finish ({@code ExecutorService.shutdown()}).
	 */
	@Override
	public void shutdown() {
		try {
			super.shutdown();
		} catch (Exception e) {
			// FIX: single log call instead of printStackTrace() plus LOG.error
			LOG.error(e);
		}
		flag = false;

		if (es != null) {
			es.shutdown();
		}
	}

	/**
	 * Main scheduling loop: seeds the URL queue from list inputs, then pops
	 * URLs and dispatches an HttpWorker for each until MAX_TIMES consecutive
	 * empty polls (with error-retry passes exhausted) or shutdown/interrupt.
	 */
	@Override
	public void run() {

		// Seed the URL queue from list-type inputs.
		for (Input input : inputs) {
			if (input instanceof listInput) {
				for (Object obj : input.all()) {
					URIManager.add(job, new UrlInfo((String) obj), false);
				}
			}
		}

		HttpJob job = ((HttpJob) this.job);

		int threadNum = job.getThreadNum();

		// FIX: a fixed-size pool — the scheduled pool's delay/period features
		// were never used; only execute() is called.
		es = Executors.newFixedThreadPool(threadNum);

		// Count of in-flight workers: incremented here before submission,
		// decremented by HttpWorker when it finishes.
		AtomicInteger atomicInteger = new AtomicInteger();

		// Remaining passes allowed to reset errored URLs back into the queue.
		int errCount = job.getErrCount();

		int emptyCount = 0;

		this.job.setStartTime(new Date());

		while (flag) {

			UrlInfo urlInfo = (UrlInfo) URIManager.pop(job);

			if (urlInfo == null) { // queue empty: back off for 2s
				emptyCount++;

				try {
					Thread.sleep(2000L);
				} catch (InterruptedException e) {
					// FIX: restore interrupt status before stopping
					Thread.currentThread().interrupt();
					flag = false;
					break;
				}
				if (emptyCount >= MAX_TIMES) {
					if (errCount > 0) {
						errCount--;
						LOG.info("so to try reset err!");
						long resetCount = URIManager.resetErr(job);
						LOG.info("reset url errs count " + resetCount);
						if (resetCount == 0) {
							flag = false;
						}
					} else {
						flag = false;
					}
				}
				continue;
			}

			// Skip URLs beyond the job's configured crawl depth (0 = unlimited).
			if (job.getDepth() > 0 && urlInfo.getDepth() > job.getDepth()) {
				LOG.info("job depth is " + job.getDepth() + " url depth is " + urlInfo.getDepth() + " so skip!");
				continue;
			}

			emptyCount = 0;

			atomicInteger.incrementAndGet();
			try {

				// Throttle: wait while more workers are in flight than configured.
				while (flag && atomicInteger.get() > threadNum) {
					try {
						LOG.info("run thread:" + atomicInteger.get() + " config threadNum:" + threadNum + " now to sleep 10s!");
						Thread.sleep(10000L);
					} catch (InterruptedException e) {
						// FIX: restore interrupt status before stopping
						Thread.currentThread().interrupt();
						flag = false;
					}
				}

				if (!flag) {
					// FIX: do not submit a worker after shutdown/interrupt; undo
					// the reservation so the in-flight counter stays consistent.
					atomicInteger.decrementAndGet();
					break;
				}

				es.execute(new HttpWorker(this, job, regexList, urlInfo, atomicInteger));

			} catch (Exception e) {
				// FIX: log instead of printStackTrace()
				LOG.error(e);
			}

		}

		// MAX_TIMES consecutive empty polls means the crawl drained normally:
		// mark the job finished (status 2).
		if (emptyCount >= MAX_TIMES) {
			URIManager.changeStatus(job.getName(), 2);
		}

		this.job.setEndTime(new Date());

		SchedulerManager.removeKey(job);
	}

	/** @return true while the main loop is (or should be) running */
	@Override
	public boolean isRuning() {
		return flag;
	}

	/**
	 * Initializes inputs/outputs for the job and ensures the MongoDB success
	 * and error collections have an index on {@code update_time}.
	 */
	@Override
	protected void init() {

		String jobName = job.getName();

		inputs = InputFactory.create(job.getInput(), jobName);

		// Create MongoDB indexes (only on first use, i.e. empty collections).
		MongoDao mongoDao = StaticValue.getBean(MongoDao.class, "mongoDao");
		DBCollection collection = mongoDao.getCollection(URIManager.SUCCESS + jobName);
		if (collection.count() == 0) {
			collection.createIndex(new BasicDBObject("update_time", 1));
		}
		collection = mongoDao.getCollection(URIManager.ERROR + jobName);
		if (collection.count() == 0) {
			collection.createIndex(new BasicDBObject("update_time", 1));
		}

		outputs = OutputFactory.create(job.getOutput(), jobName);
	}

	/**
	 * Fetches and parses a single URL synchronously for testing/preview,
	 * stripping the heavyweight fields (document, html, urls) before return.
	 *
	 * @param url the URL to fetch
	 * @return the parsed page with bulky payload fields cleared
	 */
	@Override
	public Object test(String url) {
		Page page = new HttpWorker(this, (HttpJob) job, regexList, new UrlInfo(url), new AtomicInteger()).explan();

		page.setDocument(null);
		page.setHtml(null);
		page.setUrls(null);

		return page;
	}

}
