package com.legleg.run.crawler;

import com.legleg.data.Feed;
import com.legleg.handler.FeedHandler;
import org.apache.log4j.Logger;

import java.util.HashMap;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * Periodically launches feed-crawler threads. Every two hours it queries all
 * feeds and starts a {@code FeedThread} (named {@code feed-<id>}) for each
 * feed that does not already have a live crawler thread in the shared
 * thread group.
 */
public class CrawlerManager implements Runnable {
    private static final Logger logger = Logger.getLogger(CrawlerManager.class);

    /** All crawler threads are created in this group so they can be enumerated per round. */
    private static final ThreadGroup threadGroup = new ThreadGroup("CrawlerThreadGroup");

    /** Fixed delay (minutes) between the end of one crawl round and the start of the next. */
    private static final long CRAWL_DELAY_MINUTES = 120;

    public static void main(String[] args) {
        // Re-query all feeds every 2 hours; fixed delay is measured from the
        // end of each run, so rounds never overlap on this executor.
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        executor.scheduleWithFixedDelay(new CrawlerManager(), 0, CRAWL_DELAY_MINUTES, TimeUnit.MINUTES);
    }

    /**
     * Runs one crawl round: snapshots the names of crawler threads that are
     * still alive, then starts a new {@code FeedThread} for every feed that
     * does not already have a running thread named {@code feed-<id>}.
     */
    @Override
    public void run() {
        logger.info("start new round of feeds crawler");

        // Names of crawler threads that are still running, used as a set.
        HashMap<String, Boolean> runningMap = new HashMap<String, Boolean>();
        int active = threadGroup.activeCount();
        if (active > 0) {
            Thread[] running = new Thread[active];
            // enumerate() reports how many slots it actually filled; threads may
            // have died since activeCount(), so trust the returned count and
            // null-check entries instead of walking the whole array (the
            // original could NPE on a stale null slot).
            int filled = threadGroup.enumerate(running);
            for (int i = 0; i < filled; i++) {
                Thread thread = running[i];
                if (thread != null) {
                    runningMap.put(thread.getName(), Boolean.TRUE);
                }
            }
        }

        List<Feed> allFeeds = FeedHandler.getInstance().getAllFeeds();
        // Parenthesized ternary: the original '"msg" + allFeeds == null ? ...'
        // bound the concatenation first, which both dropped the message text
        // and threw an NPE precisely when allFeeds was null.
        logger.info("all feeds size is " + (allFeeds == null ? 0 : allFeeds.size()));
        if (allFeeds != null) {
            for (Feed feed : allFeeds) {
                String threadName = "feed-" + feed.getId();
                if (!runningMap.containsKey(threadName)) {
                    logger.info("start thread " + threadName);
                    new Thread(threadGroup, new FeedThread(feed), threadName).start();
                }
            }
        }
        // No manual clear()/null-ing needed: runningMap is a local and becomes
        // unreachable as soon as this method returns.
    }
}
