package me.pearjelly.crawler;

import java.io.ByteArrayInputStream;
import java.util.Locale;
import java.util.regex.Pattern;

import me.pearjelly.filters.VisitFilter;
import me.pearjelly.filters.commons.DepthFilter;
import me.pearjelly.filters.commons.DomainFilter;
import me.pearjelly.filters.commons.ResetUrlFilter;
import me.pearjelly.filters.commons.UrlFilter;
import me.pearjelly.model.Crawler;
import me.pearjelly.model.Image;
import me.pearjelly.model.PageContext;
import me.pearjelly.util.ImageQueue;
import me.pearjelly.util.JpegMetadataUtil;
import me.pearjelly.util.PageContextUtil;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.drew.metadata.Metadata;

import edu.uci.ics.crawler4j.crawler.Page;
import edu.uci.ics.crawler4j.crawler.WebCrawler;
import edu.uci.ics.crawler4j.url.WebURL;

/**
 * A crawler4j {@link WebCrawler} that walks HTML pages matching a configured
 * "end page" pattern and offers JPEG images above a configured minimum size to
 * the {@link ImageQueue} for downstream processing.
 *
 * <p>Configuration is held in static fields (crawler4j instantiates one
 * crawler object per thread, so per-instance state would not be shared);
 * {@link #configure(Crawler)} must be called once before the crawl starts.
 * NOTE(review): the static state means only one configuration can be active
 * per JVM at a time — confirm this matches how the controller is used.
 */
public class DefaultImgCrawler extends WebCrawler {

	/**
	 * Installs the crawl configuration: builds the visit-filter chain
	 * (domain -> url-extension -> depth -> optional url-reset) and compiles
	 * the end-page and image URL patterns, falling back to sensible defaults
	 * for any unset value (depth 3, ".shtml/.html/.htm" end pages, ".jpg/.jpeg"
	 * images, 300x300 minimum size).
	 *
	 * @param crawler the crawl configuration to apply; must not be null
	 */
	public static void configure(Crawler crawler) {
		DefaultImgCrawler.crawler = crawler;
		// Rebuild the chain from scratch so a repeated configure() call does
		// not stack new filters on top of the previous configuration.
		visitFilter = null;
		String urlResetReg = crawler.getUrlResetReg();
		if (null != urlResetReg && urlResetReg.trim().length() > 0) {
			visitFilter = new ResetUrlFilter(urlResetReg, visitFilter);
		}
		visitFilter = new DepthFilter(
				crawler.getDepth() > 0 ? crawler.getDepth() : 3, visitFilter);
		visitFilter = new UrlFilter(
				crawler.getFilterReg() != null ? crawler.getFilterReg()
						: ".*(\\.(css|js|mid|mp2|mp3|mp4|wav|avi|mov|mpeg|ram|m4v|pdf|rm|smil|wmv|swf|wma|zip|rar|gz|bmp|gif|png|tiff?))$",
				visitFilter);
		visitFilter = new DomainFilter(crawler.getDomain(), visitFilter);
		endPagePatterns = crawler.getEndPageReg() != null ? Pattern
				.compile(crawler.getEndPageReg()) : Pattern
				.compile(".*(\\.s?html?)$");
		imgPatterns = crawler.getImgReg() != null ? Pattern.compile(crawler
				.getImgReg()) : Pattern.compile(".*(\\.(jpe?g))$");
		minWidth = crawler.getMinWidth() > 0 ? crawler.getMinWidth() : 300;
		minHeight = crawler.getMinHeight() > 0 ? crawler.getMinHeight() : 300;
	}

	/**
	 * Decides whether a discovered URL should be fetched: it must pass the
	 * configured filter chain and match either the end-page or the image
	 * pattern. Any exception is logged and treated as "do not visit".
	 */
	@Override
	public boolean shouldVisit(WebURL webURL) {
		try {
			// Lower-case once with a fixed locale so URL matching is not
			// affected by the JVM's default locale (e.g. Turkish dotless-i).
			String url = webURL.getURL().toLowerCase(Locale.ROOT);
			return visitFilter.filter(webURL)
					&& (endPagePatterns.matcher(url).matches() || imgPatterns
							.matcher(url).matches());
		} catch (Exception e) {
			logger.warn("shouldVisit failed for url:{}", webURL.getURL(), e);
			return false;
		}
	}

	/**
	 * Handles a fetched page: HTML end pages are cached by doc id so their
	 * context can be attached to images found later; binary image pages have
	 * their JPEG metadata read and, if new and at least the configured minimum
	 * size, are offered to the {@link ImageQueue}. Exceptions are logged and
	 * swallowed so a single bad page does not stop the crawl.
	 */
	@Override
	public void visit(Page page) {
		try {
			String url = page.getWebURL().getURL().toLowerCase(Locale.ROOT);
			PageContext pageContext = new PageContext(page);
			logger.debug("Visit page:{}", url);
			if (!page.isBinary() && endPagePatterns.matcher(url).matches()) {
				PageContextUtil.putInCache(page.getWebURL().getDocid(),
						pageContext);
				logger.info("Cached endPage:{}", url);
			} else if (page.isBinary() && imgPatterns.matcher(url).matches()) {
				ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(
						page.getBinaryData());
				Metadata metadata = JpegMetadataUtil
						.readMetadata(byteArrayInputStream);
				// getWith: (sic) — existing name in JpegMetadataUtil.
				int width = JpegMetadataUtil.getWith(metadata);
				int height = JpegMetadataUtil.getHeight(metadata);
				if (PageContextUtil.existImage(pageContext)) {
					logger.info("Ignored exist image:{}", url);
				} else if (width >= minWidth && height >= minHeight) {
					// Inclusive comparison: min* are minimums, so an image
					// exactly at the configured size must be accepted.
					Image image = new Image(pageContext, metadata, crawler);
					ImageQueue.offer(image);
					logger.info("Offered for process image:{}", image);
				} else {
					logger.info("Ignored small image:{}", url);
				}
			}
		} catch (Exception e) {
			logger.error("Failed to process page:{}", page.getWebURL(), e);
		}
	}

	private static final Logger logger = LoggerFactory
			.getLogger(DefaultImgCrawler.class);
	// Static configuration shared by all crawler instances; set via configure().
	private static Crawler crawler;
	private static Pattern endPagePatterns;
	private static Pattern imgPatterns;
	private static VisitFilter visitFilter;
	private static int minWidth;
	private static int minHeight;

}
