package infiews.crawler;

import infiews.crawler.exceptions.CrawlerDatabaseException;
import infiews.util.Logger;
import infiews.web.Page;
import infiews.web.Site;

import java.util.ArrayList;
import java.util.List;

public final class CrawlerController {

	/**
	 * Creates the controller and its task manager.
	 *
	 * @throws Exception if the task manager cannot be initialised
	 *         (propagated from the {@link TaskManager} constructor)
	 */
	public CrawlerController() throws Exception {
		taskManager = new TaskManager();
	}

	/**
	 * Runs a full crawl of the given site: marks already-persisted pages as
	 * done, seeds the task queue with the site's start URLs, spawns the
	 * configured number of crawler threads, and supervises them (restarting
	 * dead ones) until no thread is alive. Pending pages are flushed to the
	 * database on exit, even if the crawl fails.
	 *
	 * @param site the site whose start URLs, namespace and thread count
	 *             drive the crawl
	 */
	public void start(Site site) {
		try {
			threads = new ArrayList<Thread>();
			crawlers = taskManager.getThreads();

			// Pages already stored for this namespace count as results,
			// so they are not crawled again.
			List<Page> pages = Crawler.getDb().loadAllPages(site.getNamespace());
			taskManager.addToResultList(pages);

			seedStartUrls(site);

			log.info("Start " + taskManager.getTaskCount());
			startCrawlers(site);
			superviseCrawlers(site);
		} catch (Exception e) {
			log.exception(e);
		} finally {
			try {
				taskManager.flushPages();
			} catch (CrawlerDatabaseException e) {
				// Log the full exception; a bare message would lose the cause.
				log.exception(e);
			}
		}
	}

	/** Enqueues each start URL of the site that is not already a known task. */
	private void seedStartUrls(Site site) {
		for (String startUrl : site.getStartUrls()) {
			Page page = new Page(startUrl);
			if (!taskManager.hasTask(page)) {
				// NOTE(review): start pages are flagged not-to-save —
				// presumably they are entry points only; confirm intent.
				page.setIsMustSave(false);
				taskManager.addTask(page);
			}
		}
	}

	/** Creates and starts one crawler thread per configured slot (names are 1-based). */
	private void startCrawlers(Site site) {
		int numberOfCrawlers = site.getThreads();
		for (int i = 1; i <= numberOfCrawlers; i++) {
			WebCrawler crawler = new WebCrawler(site, this);
			Thread thread = new Thread(crawler, "Thread " + i + " of site '" + site.getNamespace() + "'");

			thread.start();
			crawlers.add(crawler);
			threads.add(thread);
			log.info("Crawler " + i + " started.");
		}
	}

	/**
	 * Polls every 5 seconds: restarts dead threads, and once no thread is
	 * alive notifies every crawler via {@code onBeforeExit()} and returns.
	 */
	private void superviseCrawlers(Site site) {
		while (true) {
			sleep(5);

			if (!isAnyThreadWorking()) {
				log.info("All of the crawlers are stopped. Finishing the process.");
				for (WebCrawler crawler : crawlers) {
					crawler.onBeforeExit();
				}
				return;
			}

			for (int i = 0; i < threads.size(); i++) {
				Thread thread = threads.get(i);
				if (!thread.isAlive()) {
					// NOTE(review): restarted threads are named with the
					// 0-based index, unlike the 1-based startup names.
					log.info("Thread " + i + " was dead, I'll recreate it.");
					WebCrawler crawler = crawlers.get(i);
					thread = new Thread(crawler, "Thread " + i + " of site '" + site.getNamespace() + "'");
					threads.set(i, thread);
					thread.start();
				}
			}
		}
	}

	/**
	 * Sleeps for the given number of seconds. If interrupted, restores the
	 * thread's interrupt flag instead of swallowing the interruption.
	 */
	private void sleep(int seconds) {
		try {
			// Long multiplication avoids int overflow for large values.
			Thread.sleep(seconds * 1000L);
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt();
		}
	}

	/** @return {@code true} if at least one crawler thread is still alive */
	private boolean isAnyThreadWorking() {
		if (threads != null) {
			for (Thread thread : threads) {
				if (thread.isAlive()) {
					return true;
				}
			}
		}
		return false;
	}

	/** @return the task manager shared with the crawler threads */
	public TaskManager getTaskManager() {
		return taskManager;
	}

	private final TaskManager taskManager;
	private final Logger log = Crawler.log();

	// Package-private on purpose: accessibility kept as-is in case other
	// classes in this package rely on direct access.
	List<Thread> threads;
	List<WebCrawler> crawlers;
}