package com.ewei.web.crawler.pool;

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;

import com.ewei.web.crawler.dao.Queue;
import com.ewei.web.crawler.dao.URLQueue;
import com.ewei.web.crawler.domain.URLs;
import com.ewei.web.crawler.service.CrawlerService;
import com.ewei.web.crawler.utils.ConnectionHandler;
import com.ewei.web.crawler.utils.RegxContext;

/**
 * 爬虫线程池
 * @author David
 *
 */
public class CrawlerThreadPool {

	/** Running count of topic links discovered so far (shared across workers). */
	public AtomicInteger num = new AtomicInteger(0);

	private static final int MAX_THREAD_SIZE = 5;

	private static final int MAX_TIME_FREE = 1000 * 20;
	// number of worker threads
	private int thread_size = MAX_THREAD_SIZE;
	// maximum idle time of a worker thread (milliseconds) before it exits
	private int maxFree = MAX_TIME_FREE;
	// cookies attached to every crawl request (replaced wholesale via setCookies)
	private volatile Map<String, String> cookies = new HashMap<String, String>();
	// timestamp (epoch millis) at which the current crawl started
	private long crawlerTime = 0;
	// pool state: true while the pool is stopped / being stopped
	private volatile boolean isClosed = true;

	// count of currently active worker threads; AtomicInteger is already
	// thread-safe and the field is assigned once, so it is final (the
	// original 'volatile' on the reference was redundant)
	private final AtomicInteger actThreads = new AtomicInteger(0);

	private Thread[] threads = null;

	// FIX: must be volatile for double-checked locking to be correct under the
	// Java Memory Model — without it another thread may observe a
	// partially-constructed instance published by getInstance().
	private static volatile CrawlerThreadPool pool = null;

	// pending URL queue shared by all workers; guarded by poolLock
	private Queue<URLs> queue = null;

	private ReentrantLock poolLock = new ReentrantLock();

	// signalled whenever a new URL is pushed, so idle workers can wake up
	private Condition notNull = poolLock.newCondition();

	private CrawlerThreadPool() {
		queue = URLQueue.getInstance();
		threads = new Thread[thread_size];
		this.crawlerTime = System.currentTimeMillis();
	}

	/**
	 * Returns the singleton pool instance (lazily created, double-checked
	 * locking; see the volatile 'pool' field above).
	 */
	public static CrawlerThreadPool getInstance() {
		if (null == pool) {
			synchronized (CrawlerThreadPool.class) {
				if (null == pool) {
					pool = new CrawlerThreadPool();
				}
			}
		}
		return pool;
	}

	/**
	 * Starts the pool: resets the crawl start time and spawns
	 * {@code thread_size} worker threads.
	 *
	 * @throws IllegalStateException if the previous run has not fully stopped
	 *         (the pool is still open or workers are still active)
	 */
	public void start() {
		if (!isClosed || actThreads.get() > 0) {
			// IllegalStateException is the idiomatic type here; it is a
			// RuntimeException subclass, so existing callers remain compatible
			throw new IllegalStateException("the pool doesn't stop completely!");
		}
		crawlerTime = System.currentTimeMillis();
		isClosed = false;
		threads = new Thread[thread_size];
		for (int i = 0; i < threads.length; i++) {
			threads[i] = new Thread(new CrawlerThread());
			threads[i].start();
			actThreads.incrementAndGet();
		}
	}

	/**
	 * Requests an orderly shutdown: each worker exits at the top of its next
	 * loop iteration. Does not interrupt in-flight HTTP requests.
	 */
	public void close() {
		isClosed = true;
	}

	/**
	 * Enqueues a URL for crawling and wakes one idle worker.
	 *
	 * @param url the resource link to crawl
	 */
	public void addURL(URLs url) {
		poolLock.lock();
		try {
			queue.push(url);
			notNull.signal();
		} finally {
			poolLock.unlock();
		}
	}

	public Queue<URLs> getQueue() {
		return queue;
	}

	public void setQueue(Queue<URLs> queue) {
		this.queue = queue;
	}

	public Map<String, String> getCookies() {
		return cookies;
	}

	public void setCookies(Map<String, String> cookies) {
		this.cookies = cookies;
	}

	/**
	 * Crawler worker: repeatedly polls the shared queue, fetches the page,
	 * extracts topic links and feeds them back into the pool. Exits when the
	 * pool is closed or after idling for {@code maxFree} ms on an empty queue.
	 *
	 * @author David
	 */
	private class CrawlerThread implements Runnable {
		public void run() {
			while (!isClosed) {
				Document document = null;
				URLs url = null;
				poolLock.lock();
				// FIX: queue.poll() now runs INSIDE the try — in the original it
				// sat between lock() and try, so an exception from poll() would
				// skip the finally-unlock and deadlock every other worker.
				try {
					url = queue.poll();
					if (null == url) {
						// queue empty: wait up to maxFree ms for a producer signal
						notNull.await(maxFree, TimeUnit.MILLISECONDS);
						url = queue.poll();
					}
				} catch (InterruptedException e) {
					// FIX: restore the interrupt status instead of swallowing it,
					// so the owner of this thread can observe the interruption
					Thread.currentThread().interrupt();
				} finally {
					poolLock.unlock();
				}
				if (null == url) {
					// idled past maxFree with nothing to do -> terminate worker
					break;
				}
				// site-specific suffix; adjust when targeting a different page layout
				String urlStr = url.getUrl() + "/organize";
				try {
					document = ConnectionHandler.wrapper(Jsoup.connect(urlStr))
							.cookies(pool.getCookies())
							.get();
					Elements elements = document.getElementsByAttributeValue("id", "zh-topic-organize-child-editor");
					String htmlBody = elements.toString();
					List<URLs> list = RegxContext.crawlDatas(RegxContext.topicUrl, htmlBody);
					pool.num.addAndGet(list.size());
					for (URLs el : list) {
						System.out.println("话题:" + el.getUrl() + "  名字:" + el.getDescription());
						el.setUrl(CrawlerService.REFER + el.getUrl()); // prefix relative link with site root
						pool.addURL(el);
					}
				} catch (Exception e) {
					// FIX: the original swallowed the failure silently; log the
					// cause before requeueing so repeated failures are diagnosable
					System.err.println("crawl failed for " + urlStr + ": " + e);
					queue.repush(url.getId());
				}
			}
			int remaining = actThreads.decrementAndGet(); // one fewer active worker
			SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
			System.out.println("第" + remaining + "工作线程已关闭");
			System.out.println("开始时间:-->" + format.format(new Date(crawlerTime)));
			System.out.println("结束时间:-->" + format.format(new Date(System.currentTimeMillis())));
		}
	}
}
