package com.laivi.basic.module.crawler;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

import com.laivi.basic.common.listener.CrawlingBack;
import com.laivi.basic.model.crawler.BloomFilter;


/**
 * @author laivi.zhu@gmail.com
 * @date 2013-10-9
 * description: data crawler
 */
/**
 * Multi-threaded data crawler. Dispatches crawl tasks to a fixed-size
 * thread pool and blocks the caller until all workers have finished,
 * coordinated through a {@link CountDownLatch}.
 *
 * NOTE(review): every crawling(...) overload that uses the pool shuts it
 * down in its finally block, so a Crawler instance is effectively
 * single-use per pooled crawl.
 */
public class Crawler {
	// Counted down by worker tasks; await() blocks until all have finished.
	// Reassigned per crawl, so it cannot be final.
	private CountDownLatch latch;
	// Fixed-size worker pool; sized by threadNum and never replaced.
	private final ExecutorService pool;
	private final int threadNum;

	/** Creates a crawler with a default pool of 10 worker threads. */
	public Crawler() {
		this(10);
	}

	/**
	 * Creates a crawler backed by a fixed thread pool.
	 *
	 * @param threadNum number of worker threads in the pool
	 */
	public Crawler(int threadNum) {
		this.threadNum = threadNum;
		pool = Executors.newFixedThreadPool(threadNum);
	}

	/**
	 * Runs one {@code CrawlerThread} per parser and blocks until all of
	 * them complete. The pool is shut down afterwards.
	 *
	 * @param parsers crawl tasks to execute, one worker task per parser
	 */
	public void crawling(Parsers... parsers) {
		latch = new CountDownLatch(parsers.length);
		for (Parsers parser : parsers) {
			pool.execute(new CrawlerThread(parser, this.latch));
		}
		try {
			latch.await();
		} catch (InterruptedException e) {
			// FIX: restore the interrupt flag instead of swallowing it,
			// so callers up the stack can still observe the interruption.
			Thread.currentThread().interrupt();
			e.printStackTrace();
		} finally {
			pool.shutdown();
		}
	}

	/**
	 * Breadth-first URL crawl starting from {@code startUrl}: a producer
	 * thread ({@code CrawlerUrl}) feeds discovered URLs into a bounded
	 * queue while {@code threadNum} pooled {@code SaveUrlDoc} consumers
	 * drain and persist them. Blocks until all consumers finish.
	 *
	 * @param startUrl first URL placed on the crawl queue
	 * @param maxQueue capacity bound for both blocking queues
	 */
	public void crawling(String startUrl, int maxQueue) {
		latch = new CountDownLatch(this.threadNum);
		LinkedBlockingQueue<String> parseredQueue = new LinkedBlockingQueue<String>(maxQueue);
		LinkedBlockingQueue<String> parseringQueue = new LinkedBlockingQueue<String>(maxQueue);
		// Bloom filters deduplicate URLs so the same page is not crawled twice.
		BloomFilter parseredfilter = new BloomFilter();
		BloomFilter parseringfilter = new BloomFilter();
		try {
			parseringQueue.put(startUrl);
			new Thread(new CrawlerUrl(parseredQueue, parseringQueue, parseringfilter)).start();
			for (int i = 0; i < this.threadNum; i++) {
				pool.execute(new SaveUrlDoc(parseredQueue, parseredfilter, this.latch));
			}
			latch.await();
		} catch (InterruptedException e) {
			// FIX: re-interrupt rather than swallow (see crawling(Parsers...)).
			Thread.currentThread().interrupt();
			e.printStackTrace();
		} finally {
			pool.shutdown();
		}
	}

	/**
	 * Fetches a start page, follows every link whose URL matches
	 * {@code regex}, and writes each matched page's extracted content to
	 * the given file (overwriting any existing content).
	 *
	 * @param url   start page to fetch
	 * @param regex pattern a linked URL must fully match to be followed
	 * @param path  output file path; the file is truncated, not appended
	 * @param back  callback extracting the link map and each page's content
	 */
	public void crawling(String url, String regex, String path, CrawlingBack back) {
		try {
			// Fetch the index page first so the output file is only created
			// once we know the start URL is reachable (matches original order).
			Document doc = Jsoup.connect(url).get();
			// FIX: try-with-resources guarantees the writer is closed on every
			// path; the original used a hand-rolled finally with a nested
			// try/catch just to close it.
			try (BufferedWriter bw = new BufferedWriter(new FileWriter(new File(path), false))) {
				for (Map.Entry<String, String> entry : back.crawleUrl(doc).entrySet()) {
					if (entry.getKey().matches(regex)) {
						try {
							Document docUrl = Jsoup.connect(entry.getKey()).timeout(5 * 1000).get();
							bw.write(entry.getValue());
							bw.newLine();
							wirteLine(bw, back.crawleContent(docUrl));
							bw.newLine();
						} catch (Exception e) {
							// Best-effort per page: one failing sub-page must
							// not abort the whole crawl.
							e.printStackTrace();
						}
					}
				}
				bw.flush();
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Writes an HTML fragment to the writer, one output line per
	 * {@code </p>}-delimited chunk with {@code <p>} tags stripped.
	 *
	 * NOTE(review): each chunk is emitted twice — first split on
	 * {@code <br>} tags via the three-arg overload, then again with only
	 * {@code <p>} removed. Preserved as-is; confirm whether the double
	 * write is intentional.
	 *
	 * @param bw   destination writer
	 * @param html raw HTML fragment
	 * @throws IOException if writing to {@code bw} fails
	 */
	public void wirteLine(BufferedWriter bw, String html) throws IOException {
		String[] lines = html.split("</p>");
		for (String line : lines) {
			wirteLine(bw, line, "br");
			bw.write(line.replaceAll("<p>", ""));
			bw.newLine();
		}
	}

	/**
	 * Writes an HTML fragment to the writer, one output line per
	 * {@code </tag>}-delimited chunk with opening {@code <tag>} markers
	 * stripped.
	 *
	 * @param bw   destination writer
	 * @param html raw HTML fragment
	 * @param tag  tag name (without angle brackets) to split and strip on
	 * @throws IOException if writing to {@code bw} fails
	 */
	public void wirteLine(BufferedWriter bw, String html, String tag) throws IOException {
		String[] lines = html.split("</" + tag + ">");
		for (String line : lines) {
			bw.write(line.replaceAll("<" + tag + ">", ""));
			bw.newLine();
		}
	}

}
