package test.spider;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import org.elasticsearch.common.collect.CopyOnWriteHashSet;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

import jodd.http.HttpRequest;
import jodd.http.HttpResponse;

/**
 * Minimal breadth-limited web crawler: fetches a page, collects its links,
 * queues "detail page" links (those matching {@link #regex}) for processing,
 * and recursively crawls the rest on a shared thread pool.
 *
 * <p>Thread-safety: shared state is held in concurrent/atomic structures, but
 * the contains-then-add check on {@link #linkSet} is not atomic, so a link may
 * occasionally be crawled twice under contention — acceptable for a crawler.
 */
public class Spider {
	
	// Worker pool sized to the machine; used for the recursive crawl tasks.
	static ExecutorService threadPool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
	// Set of already-seen links. NOTE: Elasticsearch's CopyOnWriteHashSet is
	// IMMUTABLE — copyAndAdd() returns a new set, so every add must assign the
	// result back to this field (the original code discarded it, which meant
	// no link was ever remembered and deduplication silently never worked).
	static CopyOnWriteHashSet<String> linkSet = new CopyOnWriteHashSet<String>();
	
	protected Document doc;      // parsed page, null if the fetch failed
	protected String baseUrl;    // URL this Spider instance fetched
	
	static AtomicInteger totalLinks = new AtomicInteger(0);  // pages attempted
	static AtomicInteger errorLinks = new AtomicInteger(0);  // pages that failed
	
	// Detail-page links waiting to be consumed by the listener thread.
	static LinkedBlockingQueue<Object[]> queue = new LinkedBlockingQueue<>();
	
	/** Regex a link must match to count as a detail page; callers must set this before crawling. */
	public static String regex;
	// Default consumer: prints the ancestor path and the matched link.
	static DetailPageProcessor processor = new DetailPageProcessor() {
		@Override
		public void process(Object[] objs) {
			System.out.println(objs[0] + "--->" + objs[1]);
		}
	};
	
	/**
	 * Static factory; fetches and parses {@code baseUrl} immediately.
	 *
	 * @param baseUrl page to fetch
	 * @return a Spider whose {@code doc} is the parsed page, or null on failure
	 */
	public static Spider me(String baseUrl) {
		return new Spider(baseUrl);
	}
	
	/**
	 * Fetches {@code baseUrl} as UTF-8 and parses it with jsoup.
	 * On any failure, logs the error, bumps {@link #errorLinks}, and leaves
	 * {@code doc} null (callers must tolerate that).
	 */
	public Spider(String baseUrl) {
		this.baseUrl = baseUrl;
		System.out.println("send http request:" + baseUrl + "...");
		try {
			totalLinks.incrementAndGet();
			HttpResponse response = HttpRequest.get(baseUrl).charset("utf-8").send();
			String html = response.charset("utf-8").bodyText();
			doc = Jsoup.parse(html);
		} catch (Exception e) {
			// Best-effort: a single bad page must not kill the crawl.
			System.err.println("fetch failed for " + baseUrl + ": " + e);
			errorLinks.incrementAndGet();
		}
	}
	
	// href values that are navigation noise, never real targets.
	static List<String> excludeLinks = Arrays.asList("", "#", "javascript:;", "javascript:void(0);");
	
	/**
	 * Collects all non-noise href values from the fetched page.
	 *
	 * @return the distinct trimmed hrefs, or an empty set if the fetch failed
	 */
	protected Set<String> collectLinks() {
		if (doc == null) return Collections.emptySet();
		return doc.select("a[href]")
			.stream()
			.map(e -> e.attr("href").trim())
			.filter(e -> !excludeLinks.contains(e))
			.collect(Collectors.toSet());
	}
	
	/**
	 * Starts the single consumer thread that drains {@link #queue} and hands
	 * each detail-page entry to {@link #processor}. Exits when interrupted.
	 */
	public static void listen() {
		new Thread(() -> {
			while (!Thread.currentThread().isInterrupted()) {
				try {
					processor.process(queue.take());
				} catch (InterruptedException e) {
					// Restore the flag and stop the consumer loop.
					Thread.currentThread().interrupt();
				}
			}
		}).start();
	}
	
	/**
	 * Crawls {@code baseUrl}: detail links (matching {@link #regex}) go to the
	 * queue; other links are crawled recursively on the pool, up to depth 2.
	 *
	 * @param parentLinks chain of ancestor links; its size is the current depth
	 * @param baseUrl     page to crawl
	 */
	public static void crawling(List<String> parentLinks, String baseUrl) {
		int size = parentLinks.size();
		System.out.println("我是第"+size+"层");
		// Depth limit: stop once two ancestors have been accumulated.
		if(size==2) {
			System.out.println("退出！！");
			return;
		}
		Set<String> links = me(baseUrl).collectLinks();
		for (String link : links) {
			// BUG FIX: was "return", which aborted the whole loop (and all
			// remaining siblings) on the first already-seen link; "continue"
			// skips only the duplicate.
			if (linkSet.contains(link)) continue;
			// BUG FIX: copyAndAdd returns a new immutable set — the original
			// discarded the result, so the set never grew. Assign it back.
			linkSet = linkSet.copyAndAdd(link);
			List<String> temp = new ArrayList<>(parentLinks);
			// Null guard: matches() would NPE if the caller forgot to set regex;
			// treat "no regex" as "nothing is a detail page".
			if (regex != null && link.matches(regex)) {
				// Detail page: hand off to the listener via the queue.
				try {
					queue.put(new Object[]{temp, link});
				} catch (InterruptedException e) {
					Thread.currentThread().interrupt();
				}
			} else {
				// Politeness delay before descending a level.
				try {
					Thread.sleep(1000);
				} catch (InterruptedException e) {
					Thread.currentThread().interrupt();
				}
				temp.add(link);
				// BUG FIX: the recursive crawl was commented out and replaced by
				// a leftover debug fetch of a hard-coded URL; restore the recursion.
				threadPool.submit(() -> crawling(temp, link));
			}
		}
	}
	
}
