package com.gooznt.buscaprop;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.List;
import java.util.regex.Pattern;

import edu.uci.ics.crawler4j.crawler.CrawlController;

/**
 * Configures and runs a crawler4j crawl session.
 *
 * <p>Several fields are {@code static} because crawler4j instantiates the
 * crawler class ({@code GobyCrawler}) reflectively, so the crawler reads its
 * configuration (domain, link pattern, limits, output writer) through the
 * static accessors of this class. Consequently only ONE CrawlerExecutor may be
 * active at a time; constructing a second instance overwrites the shared state.
 */
public class CrawlerExecutor {
	
	/**
	 * Writer for the details URLs found during the crawl. Public and static so
	 * the reflectively-created crawler instances can append to it.
	 * NOTE(review): never closed here — the crawler side is expected to flush/close it.
	 */
	public static FileWriter fw;
	/** Folder where crawler4j stores its intermediate data/logs. */
	private String root;
	/** Domain the crawl is restricted to; read by the crawler via {@link #getDomain()}. */
	private static String domain;
	/** Compiled pattern identifying "details" links; read via {@link #getDetailsLinkPattern()}. */
	private static Pattern detailsLinkPattern;
	private int numberOfThreads;
	// Long.valueOf (and autoboxing below) replace the Long(long) constructor,
	// which is deprecated since Java 9 and wastes the boxed-value cache.
	private static Long maxDetailsFound = Long.valueOf(1L);
	/** Running count of details found so far; mutated by the crawler class. */
	public static Long detailsFound = Long.valueOf(0L);
	private List<String> seeds;
	
	/**
	 * Builds the executor and opens the output file. Also (over)writes the
	 * shared static configuration used by the crawler instances.
	 *
	 * @param filePath file where the details URLs will be saved
	 * @param root folder where crawler4j puts its intermediate data/logs
	 * @param domain2 domain the crawl is restricted to
	 * @param detailsLinkPattern2 regex (compiled DOTALL + CASE_INSENSITIVE) matching details links
	 * @param seeds2 seed URLs the crawl starts from
	 * @param numberOfThreads2 number of crawler threads to start
	 * @param maxDetailsFound2 maximum number of details pages to collect
	 */
	public CrawlerExecutor(String filePath, String root,
							String domain2, String detailsLinkPattern2,
							List<String> seeds2, int numberOfThreads2,
							long maxDetailsFound2) {
		
		this.root = root;
		domain = domain2;
		seeds = seeds2;
		detailsLinkPattern = Pattern.compile(detailsLinkPattern2, Pattern.DOTALL | Pattern.CASE_INSENSITIVE);
		numberOfThreads = numberOfThreads2;
		maxDetailsFound = maxDetailsFound2; // autoboxed via Long.valueOf
		
		try {
			fw = new FileWriter(new File(filePath));
		} catch (IOException e) {
			// NOTE(review): failure is only logged, leaving fw == null — any later
			// write by the crawler will NPE. Kept (no throws clause on this
			// constructor) to preserve the existing caller contract.
			System.err.println("CrawlerExecutor: could not open output file '" + filePath + "'");
			e.printStackTrace();
		}
	}
	
	/**
	 * Runs the crawl: creates the controller over {@link #root}, registers all
	 * seed URLs, then starts {@code GobyCrawler} with the configured number of
	 * threads. Blocks until the crawl finishes; any failure is logged only.
	 */
	public void executeCrawler() {
		
		try {
			
			CrawlController controller = new CrawlController(root);
			
			for (String s : seeds) {
				controller.addSeed(s);
			}
			
			// Blocks until crawling is done (crawler4j's synchronous start).
			controller.start(GobyCrawler.class, numberOfThreads);
			
		} catch (Exception e) {
			// Broad catch is deliberate: CrawlController declares checked
			// Exception; failures here should not propagate to the caller.
			e.printStackTrace();
		}
	}
	
	/** @return the domain the crawl is restricted to */
	public static String getDomain() {
		return domain;
	}
	
	/** @return the compiled pattern identifying details links */
	public static Pattern getDetailsLinkPattern() {
		return detailsLinkPattern;
	}
	
	/** @return the maximum number of details pages to collect */
	public static Long getMaxDetailsFound() {
		return maxDetailsFound;
	}
	
}
