package com.kalman.common.spider;

import static com.google.common.base.Preconditions.checkNotNull;

import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;

import com.google.common.base.Throwables;
import com.google.common.collect.Maps;


public class DefaultWebSpider implements WebSpider {
	/** Crawl configuration: base URL, depth limit, worker-thread count, link regex, listeners. */
	private final WebSpiderParam param;
	/** Aggregated listener that fans each crawl event out to every registered listener. */
	private final BatchWebSpiderListener listeners;
	/** Work queue of discovered-but-not-yet-fetched URLs, shared by all worker threads. */
	private final List<String> unVisitedUrls = Collections.synchronizedList(new ArrayList<String>());
	/** URLs already enqueued; {@code add()}'s return value doubles as the atomic de-dup check. */
	private final Set<String> visitedUrls = Collections.synchronizedSet(new HashSet<String>());
	/**
	 * Depth of each discovered URL (root = 1). Wrapped in a synchronized map because all
	 * worker threads read and write it concurrently (the original bare HashMap was a data race).
	 */
	private final Map<String, Integer> urlsDeep =
			Collections.synchronizedMap(Maps.<String, Integer>newHashMap());

	/** Number of new links discovered and enqueued. */
	private final AtomicLong success = new AtomicLong();
	/** Number of pages whose fetch or parse failed. */
	private final AtomicLong failed = new AtomicLong();

	/**
	 * Creates a spider for the given crawl parameters.
	 *
	 * @param param crawl configuration; must be non-null and pass its own {@code check()}
	 * @throws NullPointerException if {@code param} is null
	 */
	public DefaultWebSpider(WebSpiderParam param) {
		checkNotNull(param, "WebSpiderParam can't be null!");
		param.check();
		this.param = param;
		this.listeners = mergeListeners(param.getListeners());
	}

	/** Wraps the user-supplied listeners (possibly null) into a single batch listener. */
	private BatchWebSpiderListener mergeListeners(final Set<WebSpiderListener> listeners) {
		Set<WebSpiderListener> spiderListeners = new HashSet<WebSpiderListener>();
		if (listeners != null) {
			spiderListeners.addAll(listeners);
		}
		return new BatchWebSpiderListener(spiderListeners);
	}

	/**
	 * Atomically takes the next queued URL, or returns {@code null} when the queue is empty.
	 * Returning null instead of blindly calling {@code get(0)} closes the race where two
	 * workers both saw a non-empty queue but only one element remained
	 * (which previously threw IndexOutOfBoundsException).
	 */
	private synchronized String getAUnProcessedUrl() {
		if (unVisitedUrls.isEmpty()) {
			return null;
		}
		return unVisitedUrls.remove(0);
	}

	/**
	 * Crawls starting from the configured base URL, following links up to the configured
	 * depth with the configured number of worker threads. Blocks until every worker has
	 * drained the queue, then reports the totals via {@code onEnd}.
	 */
	public void doSpider() {
		try {
			String url = UrlHelper.formatLink(param.getBaseUrl());
			unVisitedUrls.add(url);
			visitedUrls.add(url);
			urlsDeep.put(url, 1);

			listeners.onStarted(param);
			List<Thread> workers = new ArrayList<Thread>();
			for (int i = 0; i < param.getThread(); i++) {
				Thread worker = new Thread(new Processer(this));
				worker.start();
				workers.add(worker);
			}
			// Join the workers instead of busy-spinning on unVisitedUrls.isEmpty():
			// the old while(true) loop burned a full CPU core and could fire onEnd
			// while pages were still being processed.
			for (Thread worker : workers) {
				worker.join();
			}
			listeners.onEnd(success.get(), failed.get());
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt(); // restore interrupt status before rethrowing
			listeners.onException(e);
			throw Throwables.propagate(e);
		} catch (Exception e) {
			listeners.onException(e);
			throw Throwables.propagate(e);
		}
	}

	/** Replaces the crawl's starting URL on the shared parameter object. */
	public void setBaseLink(String baseLink) {
		param.setBaseUrl(baseLink);
	}

	/**
	 * Fetches one page and enqueues every new link found on it, subject to the optional
	 * link regex and the configured depth limit. Failures are counted and reported to the
	 * listeners rather than propagated, so one bad page does not abort the crawl.
	 */
	private void getWebpageByLink(String baseLink) {
		try {
			int tempDeep = urlsDeep.get(baseLink);
			if (tempDeep > param.getDeep()) {
				return; // beyond the configured depth: do not fetch
			}
			WebPage webPage = new WebPage(baseLink, tempDeep);
			if (webPage.hasHttpLinks()) {
				listeners.handleWebPage(webPage);
				Set<String> allLinks = webPage.getHttpLinkList();
				if (param.getLinkRegex() != null) {
					allLinks = webPage.getMatchedLinkList(param.getLinkRegex());
				}
				int childDeep = tempDeep + 1; // hoisted: same for every link on this page
				for (String link : allLinks) {
					String url = UrlHelper.formatLink(link);
					// Set.add() is atomic, closing the check-then-act race that the old
					// contains()-then-add() pair had between worker threads.
					if (visitedUrls.add(url)) {
						urlsDeep.put(url, childDeep);
						unVisitedUrls.add(url);
						success.incrementAndGet();
						// The old code ALSO recursed into getWebpageByLink(url) here,
						// so every URL was fetched twice: once recursively and once by
						// the worker that later drained it from the queue. The queue
						// alone now drives the crawl.
					}
				}
			}
		} catch (Exception e) {
			// Was failed.decrementAndGet(), which drove the failure count negative.
			failed.incrementAndGet();
			listeners.onException(e);
		}
	}

	/** Worker thread body: drains the shared URL queue, fetching one page per iteration. */
	class Processer implements Runnable {
		private final DefaultWebSpider spider;

		public Processer(DefaultWebSpider spider) {
			this.spider = spider;
		}

		@Override
		public void run() {
			// NOTE(review): a worker exits as soon as the queue is momentarily empty,
			// even if a sibling is mid-fetch and about to enqueue more links — this
			// matches the original design; confirm whether stragglers matter here.
			for (String url = getAUnProcessedUrl(); url != null; url = getAUnProcessedUrl()) {
				spider.getWebpageByLink(url);
			}
		}
	}
}