package com.gooznt;

import java.util.ArrayList;
import java.util.List;

import com.gooznt.buscaprop.CrawlerExecutor;

public class Goby {

	/** Domain URL used by shouldVisit so the crawler does not leave the site. */
	private static String domain;

	/** Seed URLs the crawl starts from. */
	private static List<String> seeds = new ArrayList<>();

	/** URL pattern that identifies detail pages worth extracting. */
	private static String detailsLinkPattern;

	/** Name of the file the crawl results are written to. */
	private static String outputFileName;

	/** Number of crawler threads to run. */
	private static int threads;

	/** Maximum number of detail pages to collect before stopping. */
	private static Long maxDetailsFound;

	/**
	 * Entry point: reads the configuration, validates every parameter and
	 * launches the crawler.
	 *
	 * @param args args[0] must be the input (config) file name holding the url
	 *             to crawl, outputFileName, etc.
	 * @throws RuntimeException if no input file argument is given or any
	 *                          configured parameter fails validation
	 */
	public static void main(String[] args) {

		// Guard before args[0]: a missing argument should produce the intended
		// configuration error, not an ArrayIndexOutOfBoundsException.
		if (args.length == 0) {
			throw new RuntimeException("input file parameter is wrong");
		}

		initializeParameters(args[0]);

		// BUG FIX: the original negated only the first condition
		// (!a && b && c && d), so invalid configs usually slipped through and a
		// bad domain only aborted when everything else happened to be valid.
		// The intent is: abort unless ALL parameters validate.
		boolean allValid = ValidationUtils.validateURL(domain)
			&& validateSeeds(seeds)
			&& ValidationUtils.validateDetailsLinkPattern(detailsLinkPattern)
			&& ValidationUtils.validateFileName(outputFileName);

		if (!allValid) {
			throw new RuntimeException("wrong parameters on config file");
		}

		CrawlerExecutor crawlerExecutor = new CrawlerExecutor(outputFileName, "outputFolderName",
															domain, detailsLinkPattern,
															seeds, threads, maxDetailsFound);

		crawlerExecutor.executeCrawler();
	}

	/**
	 * Checks that every seed is a valid URL.
	 *
	 * @param seedUrls the seed URLs to validate
	 * @return true only if all seeds pass {@code ValidationUtils.validateURL}
	 */
	private static boolean validateSeeds(List<String> seedUrls) {

		for (String seed : seedUrls) {
			if (!ValidationUtils.validateURL(seed)) {
				return false; // one bad seed is enough to reject the config
			}
		}

		return true;
	}

	/**
	 * Initializes the crawl parameters from the given input file.
	 *
	 * NOTE(review): the file is not actually parsed yet — dummy values are
	 * assigned below so the rest of the flow can be exercised. The real
	 * implementation should open the file and set the variables line by line.
	 *
	 * @param inputFileName name of the configuration file; must not be null
	 * @throws RuntimeException if {@code inputFileName} is null
	 */
	private static void initializeParameters(String inputFileName) {

		if (inputFileName == null) {
			throw new RuntimeException("input file parameter is wrong");
		}

		domain = "http://www.animecons.com";
		detailsLinkPattern = "http://www.animecons.com";
		outputFileName = "animecons.txt";
		seeds.add("http://www.animecons.com");
		threads = 3;
		// Long.valueOf instead of the deprecated boxing constructor new Long(1)
		maxDetailsFound = Long.valueOf(1);
	}
}
