/**
 *
 * Copyright 2015 RSVP Technologies Inc. All rights reserved.
 * BaseWebCollector.java
 *
 */
package com.gfinal.webcollector;

import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

/**
 * Abstract base class for multi-threaded web page collection.
 *
 * <p>Subclasses supply the seed URLs ({@link #generateSeedUrlsList()}), the
 * per-page extraction logic ({@link #getResultSet(String, Document)}), result
 * persistence ({@link #saveResult(Object)}) and failure recovery
 * ({@link #FailureHandle(String)}). Call {@link #execute()} to start crawling,
 * {@link #isFinish()} to poll for completion and
 * {@link #getCollectResult()} to read per-URL status messages.
 *
 * @author Gao Yudong (gyd@nbicc.com)
 * @date 2015-3-3
 */
public abstract class BaseWebCollector {

	/** Connect/read timeout in milliseconds; subclasses may adjust it before execute(). */
	protected int TIME_OUT = 60 * 1000;

	/** Number of worker threads used by {@link #execute()}. */
	private int mThreadPoolSize = 0;

	/** Created lazily in execute(); remains null until then. */
	private ExecutorService exec = null;

	/**
	 * Queue of seed URLs waiting to be parsed. Initialized lazily in
	 * execute() rather than in the constructors, so the overridable
	 * generateSeedUrlsList() is never invoked from a constructor (where
	 * subclass fields would not yet be initialized).
	 */
	private List<String> mSeedUrlsList = null;

	/** Futures of all submitted ParserTasks, in submission order. */
	private final List<Future<String>> mFutureList = new ArrayList<Future<String>>();

	/** Uses one worker thread per available processor. */
	public BaseWebCollector() {
		this(Runtime.getRuntime().availableProcessors());
	}

	/**
	 * @param threadPoolSize number of worker threads; must be positive or
	 *        {@link #execute()} will fail when creating the pool
	 */
	public BaseWebCollector(int threadPoolSize) {
		this.mThreadPoolSize = threadPoolSize;
	}

	/**
	 * Submits one {@link ParserTask} per seed URL to a fixed thread pool and
	 * then initiates an orderly shutdown. Does nothing when the seed list is
	 * null or empty. Intended to be called at most once.
	 */
	public void execute() {
		if (mSeedUrlsList == null) {
			// Deferred from the constructor: see the mSeedUrlsList comment.
			mSeedUrlsList = generateSeedUrlsList();
		}
		if (mSeedUrlsList == null || mSeedUrlsList.isEmpty()) {
			return;
		}

		exec = Executors.newFixedThreadPool(mThreadPoolSize);
		for (String url : mSeedUrlsList) {
			mFutureList.add(exec.submit(new ParserTask(url)));
		}
		// No new tasks are accepted after this; submitted tasks still run.
		exec.shutdown();
	}

	/**
	 * Produces the list of seed URLs to crawl.
	 *
	 * @return the seed URLs; may be null or empty, in which case
	 *         {@link #execute()} is a no-op
	 */
	protected abstract ArrayList<String> generateSeedUrlsList();

	/**
	 * Extracts the result set from a fetched page.
	 *
	 * @param url the page URL being processed
	 * @param doc the parsed HTML document
	 * @return the extracted result, handed to {@link #saveResult(Object)}
	 */
	protected abstract Object getResultSet(String url, Document doc);

	/**
	 * Persists an extracted result.
	 *
	 * @param o the result produced by {@link #getResultSet(String, Document)}
	 * @return true on success; false triggers {@link #FailureHandle(String)}
	 */
	protected abstract boolean saveResult(Object o);

	/**
	 * Collects the {@code href} targets of all anchor tags on the given
	 * pages, keeping only URLs accepted by {@link #isUrlFit(String)}.
	 * Duplicates are dropped while first-seen order is preserved; pages that
	 * fail to load are skipped (best effort).
	 *
	 * @param parentUrls pages to scan for links
	 * @return the de-duplicated, filtered URLs
	 */
	protected ArrayList<String> extractUrls(ArrayList<String> parentUrls) {
		// LinkedHashSet gives O(1) de-duplication while keeping insertion
		// order — same semantics as the previous contains()-guarded add,
		// without the O(n^2) scan.
		LinkedHashSet<String> urls = new LinkedHashSet<String>();
		for (String parentUrl : parentUrls) {
			try {
				Document doc = Jsoup.connect(parentUrl).timeout(TIME_OUT).get();
				for (Element element : doc.select("a")) {
					String url = element.attr("href");
					if (isUrlFit(url)) {
						urls.add(url);
					}
				}
			} catch (IOException e) {
				// Best effort: one unreachable parent page must not abort
				// extraction from the remaining pages.
				e.printStackTrace();
			}
		}
		return new ArrayList<String>(urls);
	}

	/**
	 * Filter hook for {@link #extractUrls(ArrayList)}; accepts every URL by
	 * default. Override to restrict which links are collected.
	 */
	protected boolean isUrlFit(String url) {
		return true;
	}

	/**
	 * Invoked when fetching or saving a URL failed.
	 *
	 * @param url the URL that failed
	 * @return true if the failure was recovered, false otherwise
	 */
	protected abstract boolean FailureHandle(String url);

	/**
	 * Returns the status messages of all tasks that have completed so far.
	 * Tasks still running are skipped, so a partial list may be returned
	 * while the pool is active.
	 *
	 * @return status strings of finished tasks
	 * @throws InterruptedException if interrupted while retrieving a result
	 * @throws ExecutionException if a completed task threw an exception
	 */
	public List<String> getCollectResult() throws InterruptedException, ExecutionException {
		List<String> result = new ArrayList<String>();
		for (Future<String> f : mFutureList) {
			if (f.isDone()) {
				result.add(f.get());
			}
		}
		return result;
	}

	/**
	 * @return true when all submitted tasks have finished
	 * @throws IllegalStateException if {@link #execute()} was never called
	 */
	public boolean isFinish() throws Exception {
		if (exec == null) {
			// Unchecked subtype of Exception: existing callers catching
			// Exception remain compatible, but the type is now descriptive.
			throw new IllegalStateException("execute() has not been called");
		}
		return exec.isTerminated();
	}

	/**
	 * Fetches one URL, extracts and saves its result, and reports a
	 * human-readable status string. Fetch failures propagate as exceptions
	 * and surface through {@link Future#get()}.
	 */
	class ParserTask implements Callable<String> {

		private final String url;

		public ParserTask(String url) {
			this.url = url;
		}

		@Override
		public String call() throws Exception {
			Document doc = Jsoup.connect(url).timeout(TIME_OUT).get();
			Object result = getResultSet(url, doc);
			if (saveResult(result)) {
				return url + " succeeded";
			}
			// Give the subclass a chance to recover from the save failure.
			return FailureHandle(url)
					? url + " caught an exception, but recovered"
					: url + " failed";
		}
	}
}
