/*
 * Configures and drives a crawler4j crawl: seed URLs, proxy settings,
 * thread count, and aggregated per-thread statistics.
 */
package utfpr.cp.crawler;

import edu.uci.ics.crawler4j.crawler.CrawlConfig;
import edu.uci.ics.crawler4j.crawler.CrawlController;
import edu.uci.ics.crawler4j.fetcher.PageFetcher;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtConfig;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtServer;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
import semanticrawler.Main;

/**
 *
 * @author COINF-PROFESSOR-07
 */
public class ControllerCrawler {

    /** Politeness delay in ms between requests (currently unused; see commented line in run()). */
    final int delay = 2000;
    /** Proxy settings loaded from a properties file: host, port, username, password. */
    final Properties proxyConfig = new Properties();
    /** Path of the proxy configuration file (informational; loading happens via loadProxyConfig). */
    String configProxy = "";
    private int numberOfCrawlers;
    /** Directory where crawler4j stores intermediate crawl data. */
    String crawlStorageFolder;
    private boolean restrictMode;
    /** Seed URLs to start the crawl from. */
    HashSet<String> pages;
    /** Created in run(); isFinished()/shutdown() must not be called before run(). */
    CrawlController controller;

    /**
     * Creates a controller with default settings: storage under {@code C:/web},
     * 7 crawler threads, no proxy config path, restrict mode off.
     * Also resets the shared statistics list and the semantic-crawler URL set.
     */
    public ControllerCrawler() {
        // Delegate so default and explicit construction can never diverge.
        this("C:/web", 7, "", false);
    }

    /**
     * Creates a fully configured controller.
     *
     * @param folder       crawl storage folder handed to crawler4j
     * @param threads      number of concurrent crawler threads
     * @param pathconfig   path of the proxy configuration file (stored only;
     *                     call {@link #loadProxyConfig(String)} to actually load it)
     * @param restrictMode when true, seed URLs are also registered with
     *                     {@code SemanticCrawler} (see run())
     */
    public ControllerCrawler(String folder, int threads, String pathconfig, boolean restrictMode) {
        crawlStorageFolder = folder;
        numberOfCrawlers = threads;
        configProxy = pathconfig;
        pages = new HashSet<>();
        this.restrictMode = restrictMode;
        // Statistics and the semantic URL registry are static/shared; start clean.
        CrawlerStats.resetList();
        SemanticCrawler.clearURL();
    }

    /**
     * Loads proxy credentials from the given properties file.
     * On any read error, or when either {@code username} or {@code password}
     * is missing, {@link #proxyConfig} is cleared so {@link #run()} falls
     * back to a direct (proxy-less) connection.
     *
     * @param path path of the properties file to load
     */
    public void loadProxyConfig(String path) {
        File config = new File(path);
        if (config.exists() && config.canRead()) {
            // try-with-resources closes the stream even when load() fails.
            try (FileInputStream input = new FileInputStream(config)) {
                proxyConfig.load(input);
            } catch (IOException ex) {
                proxyConfig.clear();
                Logger.getLogger(ControllerCrawler.class.getName()).log(Level.SEVERE, null, ex);
            }
            // A usable proxy config needs both credentials; discard partial ones.
            if (!proxyConfig.containsKey("username") || !proxyConfig.containsKey("password")) {
                proxyConfig.clear();
            }
        }
    }

    /**
     * Registers a seed URL for the next crawl.
     *
     * @param url seed URL to add
     */
    public void addSite(String url) {
        pages.add(url);
    }

    /**
     * Builds the crawl configuration (including proxy settings when loaded),
     * adds all registered seed URLs, and starts the crawl.
     *
     * @throws Exception propagated from crawler4j setup or seed registration
     */
    public void run() throws Exception {
        CrawlConfig config = new CrawlConfig();
        config.setCrawlStorageFolder(crawlStorageFolder);
        //config.setPolitenessDelay(delay);

        // An empty proxyConfig means "no proxy" (see loadProxyConfig).
        if (!proxyConfig.isEmpty()) {
            config.setProxyHost(proxyConfig.getProperty("host", "proxy.cp.utfpr.edu.br"));
            config.setProxyPort(Integer.parseInt(proxyConfig.getProperty("port", "3128")));
            config.setProxyUsername(proxyConfig.getProperty("username"));
            config.setProxyPassword(proxyConfig.getProperty("password"));
        }

        /*
         * Instantiate the controller for this crawl.
         */
        PageFetcher pageFetcher = new PageFetcher(config);
        RobotstxtConfig robotstxtConfig = new RobotstxtConfig();
        RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher);
        controller = new CrawlController(config, pageFetcher, robotstxtServer);

        /*
         * Seed URLs: the first pages fetched; the crawler then follows the
         * links found in them. In restrict mode each seed is also registered
         * with SemanticCrawler (presumably to limit crawling to those hosts
         * -- confirm against SemanticCrawler.addURL).
         */
        for (String url : pages) {
            controller.addSeed(url);
            if (isRestrictMode()) {
                System.out.println(url);
                SemanticCrawler.addURL(url);
            }
        }

        /*
         * Start the crawl. startNonBlocking returns immediately; poll
         * isFinished() (or call shutdown()) to track completion.
         */
        controller.startNonBlocking(SemanticCrawler.class, getNumberOfCrawlers());
    }

    /**
     * @return true when the crawl started by {@link #run()} has finished;
     *         must only be called after run()
     */
    public boolean isFinished() {
        return controller.isFinished();
    }

    /**
     * @return the shared per-thread statistics list
     */
    public List<CrawlerStats> getStatisticsList() {
        return CrawlerStats.getList();
    }

    /**
     * Aggregates the per-thread statistics into a single summary: totals are
     * summed, and lastTime is the earliest lastTime among threads that saved
     * at least one page ({@code -1} when no thread saved anything).
     *
     * @return a synthetic CrawlerStats holding the aggregate values
     */
    public CrawlerStats getResumoAtual() {
        CrawlerStats resumo = new CrawlerStats(0);
        long time = -1;
        for (CrawlerStats thread : CrawlerStats.getList()) {
            resumo.setTotalDatabaseErrors(resumo.getTotalDatabaseErrors() + thread.getTotalDatabaseErrors());
            resumo.setTotalErrors(resumo.getTotalErrors() + thread.getTotalErrors());
            resumo.setTotalProcessedURL(resumo.getTotalProcessedURL() + thread.getTotalProcessedURL());
            resumo.setTotalVisitedPages(resumo.getTotalVisitedPages() + thread.getTotalVisitedPages());
            resumo.setTotalSavedPages(resumo.getTotalSavedPages() + thread.getTotalSavedPages());
            // Only threads that actually saved pages contribute a timestamp.
            if (thread.getTotalSavedPages() > 0) {
                if (time == -1) {
                    time = thread.getLastTime();
                } else {
                    time = Math.min(time, thread.getLastTime());
                }
            }
        }
        resumo.setLastTime(time);
        return resumo;
    }

    /**
     * @return the restrictMode
     */
    public boolean isRestrictMode() {
        return restrictMode;
    }

    /**
     * @param restrictMode the restrictMode to set
     */
    public void setRestrictMode(boolean restrictMode) {
        this.restrictMode = restrictMode;
    }

    /**
     * @return the numberOfCrawlers
     */
    public int getNumberOfCrawlers() {
        return numberOfCrawlers;
    }

    /**
     * @param numberOfCrawlers the numberOfCrawlers to set
     */
    public void setNumberOfCrawlers(int numberOfCrawlers) {
        this.numberOfCrawlers = numberOfCrawlers;
    }

    /**
     * Requests the crawl to stop and blocks until all crawler threads finish.
     * Must only be called after {@link #run()}.
     */
    public void shutdown() {
        controller.shutdown();
        controller.waitUntilFinish();
    }
}
