package org.rency.crawler.fetch.service.parser.impl;

import java.io.IOException;
import java.util.HashSet;
import org.apache.commons.lang.StringUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.rency.common.utils.domain.Extension;
import org.rency.crawler.common.enums.HttpMethod;
import org.rency.crawler.common.exception.CrawlerException;
import org.rency.crawler.domain.http.FetchHttpRequest;
import org.rency.crawler.domain.http.FetchHttpResponse;
import org.rency.crawler.domain.queue.FetchQueue;
import org.rency.crawler.fetch.helper.BuilderHelper;
import org.rency.crawler.fetch.helper.FetchCarrier;
import org.rency.crawler.fetch.helper.FetchHelper;
import org.rency.crawler.plugins.url.NormalizeUrl;
import org.springframework.stereotype.Service;

/**
 * Jsoup-based page parser.
 *
 * <p>Parses a fetched HTML response with Jsoup and extracts follow-up fetch
 * targets (hyperlinks, frames, forms, stylesheets, images, scripts) plus basic
 * page metadata (title, description, keywords, text).</p>
 *
 * <p>Fix note: the previous implementation collected targets via
 * {@code parallelStream().forEach(...)} into the shared {@link HashSet}
 * parameter. {@code HashSet} is not thread-safe, so concurrent adds could
 * silently lose discovered URLs or corrupt the set. All extraction methods now
 * iterate sequentially; per-page element lists are small, so parallelism
 * offered no benefit anyway.</p>
 *
 * @author user_rcy@163.com
 * <br>Created on 2016-06-23
 * @version V1.0.0
 * @since JDK 1.8
 */
@Service
public class JsoupParserServiceImpl extends AbstractParserService<Document> {

	/**
	 * Decodes the raw response bytes with the detected charset and parses them
	 * into a Jsoup {@link Document}. The request host is supplied as the base
	 * URI so that {@code abs:*} attribute lookups can resolve relative links.
	 *
	 * @param request  the originating HTTP request (provides the base host)
	 * @param response the fetched HTTP response (provides bytes and charset)
	 * @return the parsed document
	 * @throws IOException if decoding/parsing fails
	 */
	@Override
	protected Document getDocument(FetchHttpRequest request, FetchHttpResponse response) throws IOException {
		return Jsoup.parse(new String(response.getContent(), response.getCharset()), request.getHost());
	}

	/** Jsoup can handle any HTML payload handed to this parser, so always accept. */
	@Override
	protected boolean fetchSupport(Document document, FetchHttpResponse response) {
		return true;
	}

	/**
	 * Extracts hyperlink targets ({@code a[href]} and {@code div[href]}) and
	 * queues them as GET requests. Only same-domain links are kept when the
	 * crawler is configured to stay on the source site.
	 */
	@Override
	protected void parserHref(Document document, FetchCarrier carrier, HashSet<FetchQueue> fetchQueueCollection) throws CrawlerException {
		logger.trace("提取[Href]目标,来自[{}].",carrier.getHttpRequest().getUrl());
		long start = System.currentTimeMillis();
		try{
			Elements elements = document.select("a[href]");
			elements.addAll(document.select("div[href]"));
			// Sequential loop: the target HashSet is not thread-safe.
			for(Element element : elements){
				NormalizeUrl normalizeUrl = this.urlDetermine(element.attr("abs:href").trim(),carrier,true);
				if(normalizeUrl == null){
					continue;
				}
				logger.trace("发现[Href]目标:{},来自:{}",normalizeUrl.getNormalizeUri(),element);
				fetchQueueCollection.add(this.build(normalizeUrl, HttpMethod.GET,null,carrier));
			}
		}finally{
			logger.debug("提取[Href]目标完成, 耗时:[{}ms], 来自来源[{}].",(System.currentTimeMillis() - start),carrier.getHttpRequest().getUrl());
		}
	}

	/**
	 * Extracts {@code iframe[src]} targets and queues them as GET requests,
	 * subject to the same same-domain restriction as hyperlinks.
	 */
	@Override
	protected void parserFrame(Document document, FetchCarrier carrier, HashSet<FetchQueue> fetchQueueCollection) throws CrawlerException {
		logger.trace("提取[Frame]目标,来自[{}].",carrier.getHttpRequest().getUrl());
		long start = System.currentTimeMillis();
		try{
			// Sequential loop: the target HashSet is not thread-safe.
			for(Element element : document.select("iframe[src]")){
				NormalizeUrl normalizeUrl = this.urlDetermine(element.attr("abs:src").trim(),carrier,true);
				if(normalizeUrl == null){
					continue;
				}
				logger.trace("发现[Frame]目标:{},来自:{}",normalizeUrl.getNormalizeUri(),element);
				fetchQueueCollection.add(this.build(normalizeUrl, HttpMethod.GET,null,carrier));
			}
		}finally{
			logger.debug("提取[Frame]目标完成, 耗时:[{}ms], 来自来源[{}].",(System.currentTimeMillis() - start),carrier.getHttpRequest().getUrl());
		}
	}

	/**
	 * Extracts {@code form[action]} targets and queues them as POST requests.
	 * The form's {@code input} fields (name/value pairs) become the POST body.
	 *
	 * <p>NOTE(review): inputs whose name OR value is blank are skipped — this
	 * drops legitimately-empty form fields; presumably intentional to avoid
	 * submitting noise, but worth confirming against the target sites.</p>
	 */
	@Override
	protected void parserForm(Document document, FetchCarrier carrier, HashSet<FetchQueue> fetchQueueCollection) throws CrawlerException {
		logger.trace("提取[Form]目标,来自[{}].",carrier.getHttpRequest().getUrl());
		long start = System.currentTimeMillis();
		try{
			// Sequential loop: the target HashSet is not thread-safe.
			for(Element element : document.select("form[action]")){
				NormalizeUrl normalizeUrl = this.urlDetermine(element.attr("abs:action").trim(),carrier,true);
				if(normalizeUrl == null){
					continue;
				}
				// Collect the form's input fields as POST parameters.
				Extension postParam = new Extension();
				for(Element input : element.select("input")){
					String formKey = input.attr("name");
					String formValue = input.attr("value");
					if(StringUtils.isBlank(formKey) || StringUtils.isBlank(formValue)){
						continue;
					}
					postParam.add(formKey, formValue);
				}
				logger.trace("发现[Form]目标:{},来自:{}",normalizeUrl.getNormalizeUri(),element);
				fetchQueueCollection.add(this.build(normalizeUrl, HttpMethod.POST,postParam,carrier));
			}
		}finally{
			logger.debug("提取[Form]目标完成, 耗时:[{}ms], 来自来源[{}].",(System.currentTimeMillis() - start),carrier.getHttpRequest().getUrl());
		}
	}

	/**
	 * Extracts stylesheet targets ({@code link[href]}) and queues them as GET
	 * requests. Cross-domain resources are allowed (assets often live on CDNs).
	 */
	@Override
	protected void parserStyle(Document document, FetchCarrier carrier, HashSet<FetchQueue> fetchQueueCollection) throws CrawlerException {
		logger.trace("提取[Style]目标,来自[{}].",carrier.getHttpRequest().getUrl());
		long start = System.currentTimeMillis();
		try{
			// Sequential loop: the target HashSet is not thread-safe.
			for(Element element : document.select("link[href]")){
				NormalizeUrl normalizeUrl = this.urlDetermine(element.attr("abs:href").trim(),carrier,false);
				if(normalizeUrl == null){
					continue;
				}
				logger.trace("发现[Style]目标:{},来自:{}",normalizeUrl.getNormalizeUri(),element);
				fetchQueueCollection.add(this.build(normalizeUrl, HttpMethod.GET,null,carrier));
			}
		}finally{
			logger.debug("提取[Style]目标完成, 耗时:[{}ms], 来自来源[{}].",(System.currentTimeMillis() - start),carrier.getHttpRequest().getUrl());
		}
	}

	/**
	 * Extracts image targets ({@code img[src]}) and queues them as GET
	 * requests. Cross-domain resources are allowed.
	 */
	@Override
	protected void parserImages(Document document, FetchCarrier carrier, HashSet<FetchQueue> fetchQueueCollection) throws CrawlerException {
		logger.trace("提取[Img]目标,来自[{}].",carrier.getHttpRequest().getUrl());
		long start = System.currentTimeMillis();
		try{
			// Sequential loop: the target HashSet is not thread-safe.
			for(Element element : document.select("img[src]")){
				NormalizeUrl normalizeUrl = this.urlDetermine(element.attr("abs:src").trim(),carrier,false);
				if(normalizeUrl == null){
					continue;
				}
				logger.trace("发现[Img]目标:{},来自:{}",normalizeUrl.getNormalizeUri(),element);
				fetchQueueCollection.add(this.build(normalizeUrl, HttpMethod.GET,null,carrier));
			}
		}finally{
			logger.debug("提取[Img]目标完成, 耗时:[{}ms], 来自来源[{}].",(System.currentTimeMillis() - start),carrier.getHttpRequest().getUrl());
		}
	}

	/**
	 * Extracts script targets ({@code script[src]}) and queues them as GET
	 * requests. Cross-domain resources are allowed.
	 */
	@Override
	protected void parserScript(Document document, FetchCarrier carrier, HashSet<FetchQueue> fetchQueueCollection) throws CrawlerException {
		logger.trace("提取[Script]目标,来自[{}].",carrier.getHttpRequest().getUrl());
		long start = System.currentTimeMillis();
		try{
			// Sequential loop: the target HashSet is not thread-safe.
			for(Element element : document.select("script[src]")){
				NormalizeUrl normalizeUrl = this.urlDetermine(element.attr("abs:src").trim(),carrier,false);
				if(normalizeUrl == null){
					continue;
				}
				logger.trace("发现[Script]目标:{},来自:{}",normalizeUrl.getNormalizeUri(),element);
				fetchQueueCollection.add(this.build(normalizeUrl, HttpMethod.GET,null,carrier));
			}
		}finally{
			logger.debug("提取[Script]目标完成, 耗时:[{}ms], 来自来源[{}].",(System.currentTimeMillis() - start),carrier.getHttpRequest().getUrl());
		}
	}

	/**
	 * Validates and normalizes a candidate URL:
	 * <ol>
	 *     <li>Rejects blank URLs and self-references back to the source page.</li>
	 *     <li>Normalizes the URL via {@code urlService} (null if it cannot be normalized).</li>
	 *     <li>When {@code needSameDomain} is set and same-site crawling is
	 *         enforced, rejects URLs outside the source host.</li>
	 *     <li>Rejects URLs deeper than the configured fetch depth.</li>
	 * </ol>
	 *
	 * @param url            the absolute candidate URL (already trimmed)
	 * @param carrier        the fetch context of the page being parsed
	 * @param needSameDomain whether the same-site restriction applies to this URL type
	 * @return the normalized URL, or {@code null} if the candidate is rejected
	 */
	private NormalizeUrl urlDetermine(String url, FetchCarrier carrier, boolean needSameDomain){
		if(StringUtils.isBlank(url) || url.equalsIgnoreCase(carrier.getHttpRequest().getUrl())){
			return null;
		}
		// Normalize the URI; unparseable URLs are dropped.
		NormalizeUrl normalize = this.urlService.normalize(url);
		if(normalize == null){
			return null;
		}
		// Enforce the same-site restriction when configured for this URL type.
		if(needSameDomain && this.fetchSameSite() && !this.urlService.sameDomain(normalize, carrier.getHttpRequest().getHost())){
			return null;
		}
		// Enforce the maximum crawl depth.
		if(!this.urlService.intoLeaf(normalize, carrier.getExtension().getFetchDepth())){
			return null;
		}
		return normalize;
	}

	/**
	 * Builds a {@link FetchQueue} entry for a normalized URL. For GET requests
	 * whose original URI carries a query string, the query is stripped from the
	 * fetch URL and converted into request parameters instead.
	 *
	 * @param normalizeUrl the validated, normalized target URL
	 * @param httpMethod   GET for links/resources, POST for forms
	 * @param parameter    request parameters; may be {@code null} (an empty
	 *                     {@link Extension} is created)
	 * @param carrier      the fetch context supplying depth/retry settings
	 * @return the queue entry ready for scheduling
	 */
	private FetchQueue build(NormalizeUrl normalizeUrl, HttpMethod httpMethod, Extension parameter, FetchCarrier carrier){
		if(parameter == null){
			parameter = new Extension();
		}
		String fetchUrl = normalizeUrl.getNormalizeUri();
		if(HttpMethod.GET == httpMethod && normalizeUrl.getOrgiUri().contains("?")){
			// Split "path?query" so the query becomes explicit GET parameters.
			String orgiUri = normalizeUrl.getOrgiUri();
			int queryStart = orgiUri.indexOf('?');
			fetchUrl = orgiUri.substring(0, queryStart);
			FetchHelper.convertUrlParam(orgiUri.substring(queryStart + 1), parameter);
		}
		return BuilderHelper.build(fetchUrl, normalizeUrl.getHost(), httpMethod, parameter,
				carrier.getExtension().getFetchDepth(), carrier.getExtension().getMaxRetryTimes(), null);
	}

	/**
	 * Returns the page title, falling back to a direct {@code <head><title>}
	 * query when {@link Document#title()} yields a blank value.
	 */
	@Override
	protected String getPageTitle(Document content, FetchHttpResponse response) {
		return StringUtils.isBlank(content.title()) ? content.head().select("title").text() : content.title();
	}

	/** Returns the {@code content} attribute of {@code <meta name="description">}. */
	@Override
	protected String getPageDescription(Document content, FetchHttpResponse response) {
		return content.head().select("meta[name=description]").attr("content");
	}

	/** Returns the {@code content} attribute of {@code <meta name="keywords">}. */
	@Override
	protected String getPageKeywords(Document content, FetchHttpResponse response) {
		return content.head().select("meta[name=keywords]").attr("content");
	}

	/**
	 * Returns the page's visible text, encoded with the same charset the
	 * response body was decoded with.
	 */
	@Override
	protected byte[] getPageText(Document content, FetchHttpResponse response) {
		return content.text().getBytes(response.getCharset());
	}

	/**
	 * Returns the raw response bytes untouched rather than re-serializing the
	 * parsed {@link Document}, preserving the original markup exactly.
	 */
	@Override
	protected byte[] getPageHtml(Document document, FetchHttpResponse response) {
		return response.getContent();
	}
}
