package Data;

import edu.uci.ics.crawler4j.crawler.CrawlConfig;
import edu.uci.ics.crawler4j.crawler.CrawlController;
import edu.uci.ics.crawler4j.fetcher.PageFetcher;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtConfig;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtServer;
import edu.uci.ics.crawler4j.url.WebURL;
import org.apache.commons.io.FileUtils;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.List;
import java.util.Properties;

/**
 * Drives a single crawler4j crawl: reads crawl settings from the configuration
 * file named by {@code Settings.CONFIGURATION}, configures a {@link CrawlController},
 * seeds it with one product-listing URL, lets it run for a fixed time window,
 * then shuts it down and prints aggregated per-crawler statistics.
 */
public class LocalDataCollectorController {
    /** Multiplier converting the "politeness" property (seconds) to the milliseconds crawler4j expects. */
    private final static int UNITS_TIME = 1000;
    /** How long the crawl is allowed to run before being shut down: 2 minutes. */
    private final static long CRAWL_DURATION_MS = 2L * 60 * 1000;

    // Package-private statics kept as-is: other classes in this package may read them.
    static CrawlController controller;
    static int numberOfCrawlers;

    /**
     * Entry point. Configures, runs, and reports on the crawl.
     *
     * @param args unused
     * @throws Exception if a configuration file cannot be read, a property is
     *         missing/non-numeric, or the crawl cannot be started
     */
    public static void main(String[] args) throws Exception {
        // Fail fast if the configuration file is missing — the original code
        // swallowed the IOException and then crashed later on parseInt(null).
        Properties crawlProperties = loadProperties(Settings.CONFIGURATION);

        numberOfCrawlers = Integer.parseInt(crawlProperties.getProperty("crawlerNumber"));

        CrawlConfig config = new CrawlConfig();

        // Timestamped storage folder so consecutive runs never reuse intermediate data.
        // NOTE(review): the root path is hard-coded; the original hinted at a
        // "RootFolder" property — consider crawlProperties.getProperty("RootFolder").
        String rootFolder = "C:\\Users\\User\\Documents\\Eclipse_Projects\\Maven_Testing_SVN\\src\\main\\java";
        String stamp = DateTimeFormatter.ofPattern("dd/MM/yyyy HH:mm").format(LocalDateTime.now());
        config.setCrawlStorageFolder(rootFolder + stamp.replace("/", "-").replace(" ", "_").replace(":", "_"));
        config.setUserAgentString(crawlProperties.getProperty("UserAgent"));
        config.setPolitenessDelay(Integer.parseInt(crawlProperties.getProperty("politeness")) * UNITS_TIME);
        config.setMaxDepthOfCrawling(Integer.parseInt(crawlProperties.getProperty("depth")));
        config.setMaxPagesToFetch(Integer.parseInt(crawlProperties.getProperty("maxfetch")));

        PageFetcher pageFetcher = new PageFetcher(config);
        RobotstxtConfig robotstxtConfig = new RobotstxtConfig();
        RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher);
        controller = new CrawlController(config, pageFetcher, robotstxtServer);

        // The original loaded this file but never used its entries; the load is
        // kept so a missing/unreadable sites file still fails fast here.
        // NOTE(review): were the seeds meant to come from this file instead of
        // the hard-coded URL below? Verify against Settings.WEB_SITES' contents.
        loadProperties(Settings.WEB_SITES);

        createDirs();

        WebURL url = new WebURL();
        url.setURL("http://www.chiptec.net/componentes-para-computadores/processadores");
        controller.addSeed(url.getURL()); // getURL() already returns a String; no toString() needed
        controller.startNonBlocking(LocalDataCollectorCrawler.class, numberOfCrawlers);
        System.out.println("Crawl started");

        // Let the crawl run for a fixed window, then stop it and wait for the
        // crawler threads to finish so their local data is complete.
        Thread.sleep(CRAWL_DURATION_MS);
        controller.shutdown();
        controller.waitUntilFinish();

        // Aggregate the per-crawler statistics gathered by LocalDataCollectorCrawler.
        List<Object> crawlersLocalData = controller.getCrawlersLocalData();
        long totalLinks = 0;
        // NOTE(review): totalTextSize is never accumulated, so it always prints 0.
        // If CrawlStat exposes a text-size getter, add it to the loop below — confirm.
        long totalTextSize = 0;
        int totalProcessedPages = 0;
        for (Object localData : crawlersLocalData) {
            CrawlStat stat = (CrawlStat) localData;
            totalLinks += stat.getTotalLinks();
            totalProcessedPages += stat.getTotalProcessedPages();
        }
        System.out.println("Aggregated Statistics:");
        System.out.println("   Processed Pages: " + totalProcessedPages);
        System.out.println("   Total Links found: " + totalLinks);
        System.out.println("   Total Text Size: " + totalTextSize);
    }

    /**
     * Loads a properties file, closing the underlying stream deterministically.
     * Replaces three copy-pasted try/catch blocks that silently discarded the
     * exception message and left the stream open.
     *
     * @param path file-system path of the properties file
     * @return the loaded properties
     * @throws IOException if the file is missing or unreadable
     */
    private static Properties loadProperties(String path) throws IOException {
        Properties properties = new Properties();
        try (FileInputStream in = new FileInputStream(path)) {
            properties.load(in);
        }
        return properties;
    }

    /**
     * Deletes and recreates the directory where crawled HTML pages are stored
     * (the "storage" + "HTMLPages" properties concatenated), so each run starts
     * with an empty page store.
     *
     * @throws IOException if the configuration file cannot be read
     */
    private static void createDirs() throws IOException {
        Properties crawlProperties = loadProperties(Settings.CONFIGURATION);
        String pageDir = crawlProperties.getProperty("storage") + crawlProperties.getProperty("HTMLPages");
        System.out.println(pageDir);
        try {
            FileUtils.deleteDirectory(new File(pageDir));
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Recreate even if deletion failed so the crawler has somewhere to write.
            new File(pageDir).mkdirs();
        }
    }

}