package com.yx.crawler.template;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import com.yx.crawler.analyse.SimilarityAnalyse;
import com.yx.crawler.analyse.SimilarityAnalyse.Similarity;
import com.yx.crawler.analyse.SimilarityModel;
import com.yx.crawler.runtime.HttpProcessor;
import com.yx.crawler.typedef.CrawlerListener;
import com.yx.crawler.typedef.CrawlerPage;
import com.yx.crawler.typedef.CrawlerUrl;
import com.yx.crawler.util.TextExtract;

public abstract class BaiduCrawlerListener implements CrawlerListener {

	/**
	 * Matches every non-digit character; used to strip the Baidu result-count
	 * text (e.g. "百度为您找到相关结果约12,300,000个") down to its digits.
	 * Compiled once instead of on every call.
	 */
	private static final Pattern NON_DIGITS = Pattern.compile("[^0-9]");

	public BaiduCrawlerListener() {
	}

	/**
	 * Parses the fetched Baidu result page into {@link BaiduSearchItem}s and
	 * forwards them to the abstract {@link #crawlered(List)} callback.
	 *
	 * @param crawlerPage a fetched Baidu search-result page whose URL is a
	 *            {@link BaiduCrawlerUrl}
	 * @return the follow-up URLs produced by the subclass callback
	 */
	public List<CrawlerUrl> crawlered(CrawlerPage crawlerPage) {
		List<BaiduSearchItem> baiduSearchItems = parser(crawlerPage);
		return crawlered(baiduSearchItems);
	}

	/**
	 * Callback invoked with the search items extracted from one result page.
	 *
	 * @param baiduSearchItems parsed items; possibly empty, never null
	 * @return follow-up URLs to enqueue for crawling
	 */
	public abstract List<CrawlerUrl> crawlered(List<BaiduSearchItem> baiduSearchItems);

	/**
	 * Extracts up to 10 search items from one Baidu result page. Handles the
	 * standard organic-result layout plus the special Baike (encyclopedia)
	 * and Zhidao (Q&A) markup variants.
	 */
	private List<BaiduSearchItem> parser(CrawlerPage crawlerPage) {
		List<BaiduSearchItem> baiduSearchItems = new ArrayList<BaiduSearchItem>();
		BaiduCrawlerUrl baiduCrawlerUrl = (BaiduCrawlerUrl) crawlerPage.getCrawlerUrl();
		int pageIndex = baiduCrawlerUrl.pageIndex();
		Document document = crawlerPage.document();
		int total = getBaiduSearchResultCount(document);
		// Baidu shows at most 10 organic results per page.
		int len = total < 10 ? total : 10;
		for (int i = 0; i < len; i++) {
			// Result containers carry ids 1..10 on page 1, 11..20 on page 2, ...
			// (assumes pageIndex is 1-based, as implied by the arithmetic).
			int resultId = i + 1 + (pageIndex - 1) * 10;
			String titleCssQuery = "html body div div div div#content_left div#" + resultId
					+ ".result.c-container h3.t a";
			String summaryCssQuery = "html body div div div div#content_left div#" + resultId
					+ ".result.c-container div.c-abstract";
			Element titleElement = document.select(titleCssQuery).first();
			String href = "";
			String titleText = "";
			if (titleElement != null) {
				titleText = titleElement.text();
				href = titleElement.attr("href");
			} else {
				// Handle Baidu Baike (encyclopedia) results, which use a
				// different container layout (div#1.result-op).
				titleCssQuery = "html body div#out div#in div#wrapper div#container div#content_left div#1.result-op h3.t a";
				summaryCssQuery = "html body div#out div#in div#wrapper div#container div#content_left div#1.result-op div p";
				titleElement = document.select(titleCssQuery).first();
				if (titleElement != null) {
					titleText = titleElement.text();
					href = titleElement.attr("href");
				}
			}
			Element summaryElement = document.select(summaryCssQuery).first();
			if (summaryElement == null) {
				// Handle Baidu Zhidao (Q&A) results, which wrap the abstract
				// in a <font> element instead of div.c-abstract.
				summaryCssQuery = summaryCssQuery.replace("div.c-abstract", "font");
				summaryElement = document.select(summaryCssQuery).first();
			}
			String summaryText = summaryElement != null ? summaryElement.text() : "";

			// Keep only items with both a non-blank title and a non-blank
			// summary. titleText/summaryText are initialized to "" and can
			// never be null here, so the former null checks were redundant.
			if (!titleText.trim().isEmpty() && !summaryText.trim().isEmpty()) {
				baiduSearchItems.add(new BaiduSearchItem(titleText, href, summaryText, crawlerPage));
			}
		}
		return baiduSearchItems;
	}

	/**
	 * Reads the total result count from the "div.nums" element by stripping
	 * all non-digit characters and parsing what remains.
	 *
	 * @return the reported total, clamped to {@link Integer#MAX_VALUE}; 0 when
	 *         the element is missing (e.g. a no-result page) or contains no
	 *         digits. The original code threw NullPointerException /
	 *         NumberFormatException in those cases and overflowed int for
	 *         very large counts.
	 */
	private int getBaiduSearchResultCount(Document document) {
		String cssQuery = "html body div div div div.nums";
		Element totalElement = document.select(cssQuery).first();
		if (totalElement == null) {
			return 0;
		}
		String totalText = NON_DIGITS.matcher(totalElement.text()).replaceAll("");
		if (totalText.isEmpty()) {
			return 0;
		}
		try {
			// Counts such as "约10,000,000,000" overflow int; parse as long
			// and clamp to the int range expected by callers.
			long total = Long.parseLong(totalText);
			return total > Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) total;
		} catch (NumberFormatException e) {
			// Digit run too long even for a long; treat as "very many".
			return Integer.MAX_VALUE;
		}
	}

	/**
	 * One parsed Baidu search result: title, link, abstract, and the result
	 * page it was parsed from. Declared static because it never touches the
	 * enclosing listener — avoids the hidden outer-instance reference.
	 */
	public static class BaiduSearchItem {
		String title, href, summary;
		// parent is the search-result page this item came from; page is the
		// item's own target page, fetched lazily by page().
		CrawlerPage parent, page;

		private BaiduSearchItem(String title, String href, String summary, CrawlerPage parent) {
			this.title = title;
			this.href = href;
			this.summary = summary;
			this.parent = parent;
		}

		/** @return the result's title text */
		public String title() {
			return title;
		}

		/** @return the result's link as it appears on the result page */
		public String href() {
			return href;
		}

		/** @return the result's abstract/summary text */
		public String summary() {
			return summary;
		}

		/** @return the search-result page this item was parsed from */
		public CrawlerPage parent() {
			return parent;
		}

		/** @return the search keywords from the originating Baidu URL */
		public String[] keywords() {
			return ((BaiduCrawlerUrl) parent().getCrawlerUrl()).keywords();
		}

		/** @return the 1-based Baidu result-page index this item came from */
		public int baiduPageIndex() {
			return ((BaiduCrawlerUrl) parent().getCrawlerUrl()).pageIndex();
		}

		/** @return the crawl metadata attached to the originating URL */
		public Map<String, Object> meta() {
			return parent().getCrawlerUrl().getMeta();
		}

		/**
		 * @return the final (redirect-resolved) URL of the target page
		 * @throws Exception if fetching the target page fails
		 */
		public String trueUrl() throws Exception {
			return page().trueUrl();
		}

		/**
		 * Fetches the item's target page on first access and caches it.
		 * NOTE(review): the lazy init is not thread-safe — confirm callers
		 * only use an item from a single thread.
		 *
		 * @throws Exception if the HTTP fetch fails
		 */
		public CrawlerPage page() throws Exception {
			if (page == null) {
				page = HttpProcessor.get(href(), meta());
			}
			return page;
		}

		/**
		 * @return the extracted text content of the target page
		 * @throws Exception if fetching the target page fails
		 */
		public String text() throws Exception {
			return page().content();
		}
	}
}
