﻿package ahxu.crawler;

import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.Header;
import org.apache.http.client.params.ClientPNames;
import org.apache.http.client.params.CookiePolicy;
import org.apache.http.client.utils.URIUtils;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.conn.params.ConnManagerParams;
import org.apache.http.conn.params.ConnPerRouteBean;
import org.apache.http.conn.scheme.PlainSocketFactory;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager;
import org.apache.http.message.BasicHeader;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.CoreConnectionPNames;
import org.apache.http.params.CoreProtocolPNames;
import org.apache.http.params.HttpParams;

import com.sun.jndi.toolkit.url.UrlUtil;

/**
 * Breadth-first crawl driver: pulls seed sites from a queue, fans page
 * fetches out to a fixed thread pool, and feeds newly discovered links
 * back in level by level, honouring each site's depth, total-page and
 * same-host limits.
 *
 * @author ahxu
 */
public class CrawlerManager {
	static final Log logger = LogFactory.getLog(CrawlerManager.class);

	/** Seed sites waiting to be crawled. */
	private ConcurrentLinkedQueue<CrawlerSite> crawlerSites = new ConcurrentLinkedQueue<CrawlerSite>();
	/** URIs belonging to the depth level currently being crawled. */
	private ConcurrentLinkedQueue<UriInput> inputQueue = new ConcurrentLinkedQueue<UriInput>();
	/** URIs discovered during the current level; promoted into inputQueue when the level drains. */
	private ConcurrentLinkedQueue<UriInput> inputQueue2 = new ConcurrentLinkedQueue<UriInput>();
	// Shared HTTP client plumbing (HttpClient 4.0-era API).
	private ClientConnectionManager httpClientCm;
	private HttpParams httpClientParams;
	private DefaultHttpClient defaultHttpClient;
	/** Manager thread that drives the crawl loop started by {@link #start()}. */
	private Thread poolManageThread;
	/**
	 * Shutdown flag. Written by the manager thread (and by {@link #shutdown()})
	 * and read inside worker tasks, so it must be volatile for the workers
	 * to reliably observe the change.
	 */
	private volatile boolean poolManageThreadShutdown = false;
	/** Extracts follow-up URIs from a fetched page. */
	private IUriInputParser uriInputParser;
	/** Persists crawl results. */
	private ICrawlerResutlDao resultDao;

	/**
	 * Builds the shared, thread-safe HTTP client (200 connections in total,
	 * 20 per route, gb2312 default charset, IE7-style user agent, up to 5
	 * retries) and wires in the parser and DAO collaborators.
	 *
	 * @param uriInputParser extracts new URIs from each crawl result
	 * @param resultDao      stores each crawl result
	 */
	public CrawlerManager(IUriInputParser uriInputParser, ICrawlerResutlDao resultDao) {
		httpClientParams = new BasicHttpParams();

		// Increase max total connections to 200, and default per-route to 20.
		ConnManagerParams.setMaxTotalConnections(httpClientParams, 200);
		ConnPerRouteBean connPerRoute = new ConnPerRouteBean(20);
		ConnManagerParams.setMaxConnectionsPerRoute(httpClientParams, connPerRoute);

		// Plain HTTP only; no https scheme is registered.
		SchemeRegistry schemeRegistry = new SchemeRegistry();
		schemeRegistry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));

		httpClientCm = new ThreadSafeClientConnManager(httpClientParams, schemeRegistry);

		// gb2312 default charset / zh-cn Accept-Language: targets Chinese sites.
		httpClientParams.setParameter(CoreProtocolPNames.HTTP_CONTENT_CHARSET, "gb2312");
		httpClientParams.setParameter(CoreProtocolPNames.USER_AGENT, "Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 6.0)");
		httpClientParams.setParameter(ClientPNames.ALLOW_CIRCULAR_REDIRECTS, true);
		httpClientParams.setParameter(ClientPNames.COOKIE_POLICY, CookiePolicy.BROWSER_COMPATIBILITY);
		httpClientParams.setParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, 10000);
		httpClientParams.setParameter(CoreConnectionPNames.SO_TIMEOUT, 100000);
		Collection<Header> headers = new ArrayList<Header>();
		headers.add(new BasicHeader("Accept", "*/*"));
		headers.add(new BasicHeader("Accept-Language", "zh-cn"));
		httpClientParams.setParameter(ClientPNames.DEFAULT_HEADERS, headers);

		defaultHttpClient = new DefaultHttpClient(httpClientCm, httpClientParams);
		// Retry each request up to 5 times; never retry requests already sent.
		defaultHttpClient.setHttpRequestRetryHandler(new DefaultHttpRequestRetryHandler(5, false));

		this.uriInputParser = uriInputParser;
		this.resultDao = resultDao;
	}

	/** Queues a site to be crawled once {@link #start()} runs. */
	public void putCrawlerSite(CrawlerSite object) {
		this.crawlerSites.offer(object);
	}

	private void putCrawlerSites(Collection<CrawlerSite> objects) {
		this.crawlerSites.addAll(objects);
	}

	/** Re-queues a URI into the current level (used when a fetch fails). */
	private void putInputUri(UriInput object) {
		this.inputQueue.offer(object);
	}

	private void putInputUris(Collection<UriInput> objects) {
		this.inputQueue.addAll(objects);
	}

	/** Queues freshly discovered URIs for the next depth level. */
	private void putInputUris2(Collection<UriInput> objects) {
		this.inputQueue2.addAll(objects);
	}

	/** Asks the manager loop to stop; the worker pool is shut down shortly after. */
	public void shutdown() {
		poolManageThreadShutdown = true;
	}

	public boolean isShutDown() {
		return poolManageThreadShutdown;
	}

	/**
	 * Starts the manager thread: crawls the queued sites breadth-first with
	 * a pool of 10 workers, enforcing each site's depth, total-page and
	 * same-host limits, until the queues drain or {@link #shutdown()} is
	 * called.
	 */
	public void start() {
		poolManageThreadShutdown = false;
		final ThreadPoolExecutor crawlerThreadPool = (ThreadPoolExecutor) Executors.newFixedThreadPool(10);
		poolManageThread = new Thread(new Runnable() {

			public void run() {
				logger.info("crawlManager start");
				// NOTE(review): throws NPE if no site was queued before start();
				// confirm callers always putCrawlerSite() first.
				CrawlerSite currentSite = crawlerSites.poll();
				UriInput currentSiteUriInput = currentSite.getInput();
				// Per-site counters. "total" is incremented from worker threads,
				// so both are AtomicInteger to avoid lost updates.
				final AtomicInteger currentDeep = new AtomicInteger(0);
				final AtomicInteger crawledTotal = new AtomicInteger(0);
				if (currentSiteUriInput != null) inputQueue.offer(currentSiteUriInput);
				else poolManageThreadShutdown = true;
				while (!poolManageThreadShutdown) {
					logger.debug("crawlManager check [coreSize:" + crawlerThreadPool.getCorePoolSize() + " active:" + crawlerThreadPool.getActiveCount() + " complete:" + crawlerThreadPool.getCompletedTaskCount() + "]");
					if (inputQueue.size() != 0) {
						// Dispatch at most 10 URIs per pass (j counts dispatches).
						for (int i = 0, j = 0; i < inputQueue.size() && j < 10; i++) {
							final UriInput input = inputQueue.poll();

							if (currentSite.isLimitSamePath()) {
								// TODO same-path limiting is not implemented yet.
							}
							// Depth limit: deep is incremented once per level promotion below.
							if (currentSite.getDeep() > 0 && currentDeep.get() >= (currentSite.getDeep() + 1)) {
								logger.info("crawler will stop,because current deep has exceeded the config deep:" + currentSite.getDeep());
								poolManageThreadShutdown = true;
								inputQueue.clear();
								inputQueue2.clear();
								break;
							}
							if (currentSite.isLimitSameSite()) {
								// Skip URIs whose host differs from the seed's host;
								// URIs without a resolvable host are skipped too.
								try {
									if (!input.getUri().getHost().equals(currentSiteUriInput.getUri().getHost())) {
										logger.info("the url:" + input.getUri().getHost() + " is not same host with " + currentSiteUriInput.getUri().getHost());
										continue;
									}
								} catch (Exception e) {
									continue;
								}
							}
							if (currentSite.getTotal() > 0 && crawledTotal.get() >= currentSite.getTotal()) {
								logger.info("crawler will stop,because total pages has exceeded the config total:" + currentSite.getTotal());
								poolManageThreadShutdown = true;
								inputQueue.clear();
								inputQueue2.clear();
								break;
							}

							j++;
							crawlerThreadPool.execute(new Runnable() {
								public void run() {
									ICrawler crawler = new DefaultCrawler(defaultHttpClient);
									try {
										if (!poolManageThreadShutdown) {
											logger.info("crawler start [thread id= " + Thread.currentThread().getId() + "] for [" + input.getUri().toString() + "]");
											// Crawl the input, persist the result, then feed
											// extracted links into the next level's queue.
											CrawlerResult result = crawler.doCrawl(input);
											String saveId = resultDao.saveCrawlerResult(result);
											if (saveId == null) return;
											crawledTotal.incrementAndGet();
											Set<UriInput> newUris = uriInputParser.doParser(result);
											putInputUris2(newUris);
										}
									} catch (CrawException e) {
										// Fetch failed: put the URI back for another attempt.
										logger.error(e.getMessage());
										putInputUri(input);
									} catch (UriInputParserException e) {
										logger.error("failed to parse crawl result for [" + input.getUri() + "]", e);
									} catch (CrawlerResultDaoException e) {
										logger.error("failed to save crawl result for [" + input.getUri() + "]", e);
									} finally {
										logger.info("crawler end [thread id= " + Thread.currentThread().getId() + "] for [" + input.getUri().toString() + "]");
									}
								}
							});
							logger.debug("crawlManager add [" + input.getUri().toString() + "] to thread.");
						}
					} else if (inputQueue2.size() == 0 && crawlerThreadPool.getActiveCount() != 0) {
						// Level empty but workers still running: they may yet
						// discover new URIs, so wait for them.
						try {
							logger.info("current crawl queue is empty,but some thread is running,we will wait for them");
							Thread.sleep(1000);
						} catch (InterruptedException e) {
							// NOTE(review): interrupt status is deliberately not
							// restored so the manager loop keeps running.
							logger.error(e.getMessage());
						}
						continue;
					} else if (inputQueue2.size() != 0) {
						// Current level done: promote the next level and go one deeper.
						currentDeep.incrementAndGet();
						inputQueue.addAll(inputQueue2);
						inputQueue2.clear();
						continue;
					} else {
						// Site fully crawled: move on to the next site, or stop.
						currentSite = crawlerSites.poll();
						if (currentSite != null) {
							currentSiteUriInput = currentSite.getInput();
							inputQueue.offer(currentSiteUriInput);
						} else {
							poolManageThreadShutdown = true;
							inputQueue.clear();
							inputQueue2.clear();
						}
						continue;
					}
					try {
						Thread.sleep(5000);
					} catch (InterruptedException e) {
						logger.error(e.getMessage());
					}
				}
				crawlerThreadPool.shutdownNow();
				poolManageThreadShutdown = true;
				logger.info("crawlManager shutdown<----------------------------------------------------------------------------------");
			}
		});
		poolManageThread.start();
	}

	/** Ad-hoc smoke test: crawls one site starting from a fixed seed URL. */
	public static void main(String[] args) throws URISyntaxException, UnsupportedEncodingException, MalformedURLException {
		String url = "http://www.google.cn/intl/zh-CN/about.html";
		if (url.indexOf('?') != -1) {
			// NOTE(review): this encodes the entire query string, including the
			// '=' and '&' separators — probably each value should be encoded
			// individually. Harmless here because the seed URL has no query.
			String path = url.substring(0, url.indexOf('?'));
			String query = url.substring(url.indexOf('?') + 1);
			url = path + "?" + URLEncoder.encode(query, "UTF-8");
		}

		URI uri1 = new URI(url);

		CrawlerManager m = CrawlServiceProvider.getInstance().getCrawlerManager();

		CrawlerSite site1 = new CrawlerSite();
		site1.setLimitSamePath(true);
		UriInput i1 = new UriInput();
		i1.setAnchorText("新浪新闻");
		i1.setUri(uri1);
		site1.setInput(i1);
		m.putCrawlerSite(site1);
		m.start();
	}
}
