package crawler;

/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

import General.*;
import LinkRetriever.*;
import MetricsRetriever.*;

import java.util.ArrayList;
import java.util.List;
/**
 *
 * @author Manuel
 */
/**
 * Coordinates the two crawling phases: first link retrieval from the
 * configured repository sites, then information retrieval over the
 * collected link files.
 *
 * @author Manuel
 */
public class Scheduler {

    /** Repository selector string, matched via contains() against known site names. */
    private final String repos;

    /** Threads spawned by the link-retrieval phase; all are joined before phase two. */
    private final List<Thread> linkCrawlerThreads;

    /**
     * Creates a scheduler and publishes half of the requested limit as the
     * global crawl limit.
     *
     * @param repo  repository selector, e.g. a string containing "google"
     *              and/or "sourceforge"
     * @param limit crawl limit as a decimal string; half of it is stored in
     *              {@code Globals.crawlLimit}
     * @throws NumberFormatException if {@code limit} is not a parseable integer
     */
    public Scheduler(String repo, String limit) {
        // Plain int arithmetic — no need for a boxed Integer and intValue().
        Globals.crawlLimit = Integer.parseInt(limit) / 2;
        repos = repo;
        linkCrawlerThreads = new ArrayList<Thread>();
    }

    /** Runs the full pipeline: link retrieval, then information retrieval. */
    public void startCrawler() {
        this.startLinkRetriever();
        this.startInformationRetriever();
    }

    /**
     * Starts one crawler thread per recognized repository and blocks until
     * every started thread has finished.
     */
    public void startLinkRetriever() {
        if (repos.contains("google")) {
            linkCrawlerThreads.add(SiteManager.crawlWebSite(Globals.GOOGLE_SITE, Globals.FILE_LINKS[0]));
        }
        if (repos.contains("sourceforge")) {
            linkCrawlerThreads.add(SiteManager.crawlWebSite(Globals.SOURCEFORGE_SITE, Globals.FILE_LINKS[1]));
        }
        // add other site crawlers here, always increasing the number of crawlers respectively

        boolean interrupted = false;
        for (int i = 0; i < linkCrawlerThreads.size(); i++) {
            try {
                linkCrawlerThreads.get(i).join();
            } catch (InterruptedException ex) {
                // Remember the interruption but keep joining the remaining
                // threads, as the original logic did.
                interrupted = true;
                System.out.println("Crawler Thread "+ i + " crashed");
            }
        }
        if (interrupted) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
        }
    }

    /** Feeds the collected link files to the information-retrieval stage. */
    public void startInformationRetriever() {
        RetrievalManager rm = new RetrievalManager(Globals.FILE_LINKS);
        rm.processLinks();
    }

}
