package net.trustie.one;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import javax.annotation.Resource;

import net.trustie.dao.RecordDao;
import net.trustie.downloader.DataBasePageErrorOutPut;
import net.trustie.downloader.PageDao;
import net.trustie.model.cto51_blog_Model;
import net.trustie.utils.Constant;
import net.trustie.utils.DateHandler;

import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

import core.ModelPipeline;
import core.PageModelPipeline;
import extension.RawPage;

/**
 * <p>Title:抽取服务程序入口</p>
 * <p>Description:利用抽取模板，从爬虫数据库中抽取站点的项目、文档信息，
 * 抽取至抽取数据库中对应站点的分表中</p>
 * @author  
 * @date 2016年12月22日
 */
@Component("Application")
public class Application {
	/** Path to the site-list configuration file. */
	private String sitesPath = Constant.SITE_PATH;

	/**
	 * Per-site extraction state: {@code true} while a site is currently being
	 * extracted, so the same site is never scheduled twice concurrently.
	 * Shared (public static) with the extraction threads, hence backed by a
	 * {@link ConcurrentHashMap} — the original plain {@code HashMap} is not
	 * safe under concurrent access.
	 */
	public static Map<String, Boolean> extractState = new ConcurrentHashMap<String, Boolean>();

	/** Thread pool that runs per-site extraction tasks (max POOL_MAX_SIZE threads). */
	private ExecutorService pool = Executors.newFixedThreadPool(Constant.POOL_MAX_SIZE);

	public static void main(String args[]) {
		((Application) AppContext.appContext.getBean("Application")).startJob();
	}

	/**
	 * Main scheduling loop. Each round: read the configured site list, submit
	 * one extraction task per site that is not already in progress, then sleep
	 * before the next round. Returns only when the thread is interrupted.
	 */
	public void startJob() {
		while (true) {
			// Read which sites should be extracted this round.
			List<String> sites = readSitesFromConfig();

			for (String site : sites) {
				// Atomically mark the site as "extracting". put() returns the
				// previous value, so this replaces the old racy get-then-put
				// pair: if it was already TRUE, skip the site this round.
				Boolean previous = extractState.put(site, Boolean.TRUE);
				if (Boolean.TRUE.equals(previous)) {
					continue; // 抽取中，无需重复抽取 — already being extracted
				}
				// Extract each site on the thread pool.
				ExtractThread et = (ExtractThread) AppContext.appContext.getBean("extractThread");
				et.setSite(site);
				// Fire-and-forget submission; no return value needed.
				pool.execute(et);
			}
			try {
				System.out.println(Thread.currentThread().getName() + ": " + 
									"sleep some time for next round " + " @ " + DateHandler.getExtractTime());
				// Always sleep between rounds. The original code did
				// `if (sites.size() == 0) continue;` before the loop, which
				// skipped this sleep and busy-spun at 100% CPU whenever the
				// config file was empty or unreadable.
				Thread.sleep(1 * 60 * 1000);
				System.out.println(Thread.currentThread().getName() + ": " + 
									"next round is about to excute  " + " @ " + DateHandler.getExtractTime());
			} catch (InterruptedException e) {
				// Restore the interrupt flag and exit the loop instead of
				// swallowing the interruption (the old code only printed it).
				Thread.currentThread().interrupt();
				return;
			}
		}
	}

	/**
	 * Reads the site configuration file, one site name per line
	 * (whitespace-trimmed). Best-effort: returns an empty list on I/O error.
	 *
	 * @return the configured site names, possibly empty
	 */
	private List<String> readSitesFromConfig() {
		List<String> tasks = new LinkedList<String>();
		// try-with-resources replaces the manual close: the original closed
		// the reader twice on the success path and swallowed close failures
		// in an empty catch block.
		try (BufferedReader reader = new BufferedReader(new FileReader(new File(sitesPath)))) {
			String site;
			while ((site = reader.readLine()) != null) {
				tasks.add(site.trim());
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
		return tasks;
	}
}



