package net.trustie.one;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import javax.annotation.Resource;

import net.trustie.dao.RecordDao;
import net.trustie.downloader.DataBasePageErrorOutPut;
import net.trustie.downloader.PageDao;
import net.trustie.model.cto51_blog_Model;
import net.trustie.utils.DateHandler;

import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

import core.ModelPipeline;
import core.PageModelPipeline;
import extension.RawPage;

@Component("Application")
public class Application {

	/** Path of the task file listing the site names to extract, one per line. */
	private String sitesPath = "tasks.txt";
	/**
	 * Per-site "extraction in progress" flag. ConcurrentHashMap because worker
	 * threads (ExtractThread.run) clear the flag while this scheduler thread
	 * reads and sets it; a plain HashMap here was a data race.
	 */
	public static Map<String, Boolean> extractState = new ConcurrentHashMap<String, Boolean>();
	private ExecutorService pool = Executors.newFixedThreadPool(20);

	public static void main(String args[]) {
		((Application) AppContext.appContext.getBean("Application")).startJob();
	}

	/**
	 * Scheduling loop: every minute, re-reads the task file and submits one
	 * extraction job per site that is not already being processed. Runs until
	 * the thread is interrupted.
	 */
	public void startJob() {
		while (true) {
			// Read which sites should be extracted from the task file. An empty
			// list simply falls through to the sleep below — previously it hit a
			// `continue` that skipped the sleep and spun at 100% CPU.
			List<String> sites = readSites();

			for (String site : sites) {
				// Skip sites whose extraction from a previous round is still running.
				Boolean state = extractState.get(site);
				if (state != null && state) {
					continue;
				}
				extractState.put(site, true);
				// Extract each site on the thread pool; the bean is prototype-scoped,
				// so each site gets its own ExtractThread instance.
				ExtractThread et = (ExtractThread) AppContext.appContext.getBean("extractThread");
				et.setSite(site);
				pool.execute(et);
			}
			try {
				System.out.println(Thread.currentThread().getName() + ": " + 
									"sleep some time for next round " + " @ " + DateHandler.getExtractTime());
				Thread.sleep(1 * 60 * 1000);
				System.out.println(Thread.currentThread().getName() + ": " + 
									"next round is about to excute  " + " @ " + DateHandler.getExtractTime());
			} catch (InterruptedException e) {
				// Restore the interrupt flag and stop scheduling instead of
				// swallowing the interruption and looping forever.
				Thread.currentThread().interrupt();
				return;
			}
		}
	}

	/**
	 * Reads the task file and returns one trimmed site name per line.
	 * Returns an empty list when the file is missing or unreadable.
	 * NOTE(review): FileReader uses the platform default charset — confirm the
	 * task file is written in that encoding.
	 */
	private List<String> readSites() {
		List<String> tasks = new LinkedList<String>();
		// try-with-resources guarantees the reader is closed exactly once
		// (the original closed it twice and leaked it on FileReader failure).
		try (BufferedReader reader = new BufferedReader(new FileReader(new File(sitesPath)))) {
			String site;
			while ((site = reader.readLine()) != null) {
				tasks.add(site.trim());
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
		return tasks;
	}
}

@Component("extractThread")
@Scope("prototype")
class ExtractThread implements Runnable {

	/** Name of the site to extract; set via {@link #setSite} before the task runs. */
	private String site;
	/** Number of raw pages fetched from the database per batch. */
	private static final int BATCH_EXTRACT_SIZE = 100;
	/** Canonical names of the page-model classes used for this site's extraction. */
	public List<String> modelName = new ArrayList<String>();
	/** Extraction template class for this site, resolved by naming convention. */
	private Class<?> pageModel;
	@Resource
	private PageDao pagedao;
	@Qualifier("errorPageToDB")
	@Autowired
	private DataBasePageErrorOutPut pageErrorOutPut;

	@Resource
	private RecordDao recordDao;

	public ExtractThread() {

	}

	public ExtractThread(String site) {
		this.site = site;
	}

	public void setSite(String site) {
		this.site = site;
	}

	/**
	 * Extracts all pending pages for {@link #site}: resolves the site's page
	 * model class, pulls raw HTML pages in batches, runs each through the
	 * extractor, persists results, and advances the extraction cursor.
	 */
	public void run() {
		// Name the thread after the site so console output is attributable.
		Thread.currentThread().setName(site + "_thread");
		this.pageErrorOutPut.setTableName(site + "_error_page");
		try {
			System.out.println(Thread.currentThread().getName() + ": " + 
					"extract begain" + " @ " + DateHandler.getExtractTime());
			// Resolve the extraction template for this site. "51cto_blog" needs a
			// special case because a Java class name cannot start with a digit.
			if (site.equals("51cto_blog"))
				pageModel = cto51_blog_Model.class;
			else
				pageModel = Class.forName("net.trustie.model." + site + "_Model");
			modelName.add(pageModel.getCanonicalName());

			// Load the extraction cursor and the first batch of raw HTML pages.
			int lastId = getLastId(site);
			List<RawPage> pages = getPages(site, lastId);

			Extractor extractor = new Extractor();
			while (pages.size() > 0) {
				for (RawPage page : pages) {
					try {
						RawPage result = extractor.extract(page, pageModel);
						// Persist the extracted result; failures go to the error table.
						saveResult(site, result);
					} catch (Exception e) {
						e.printStackTrace();
						pageErrorOutPut.returnErrorPage(page, e);
					}
				}
				// Advance the cursor past this batch, then fetch the next one.
				updateLastId(site, lastId + pages.size());
				lastId = getLastId(site);
				pages = getPages(site, lastId);
			}
			System.out.println(Thread.currentThread().getName() + ": " + 
					"extract over, id=" + lastId + " @ " + DateHandler.getExtractTime());
		} catch (ClassNotFoundException e) {
			// No model class exists matching this site's name.
			e.printStackTrace();
		} catch (Exception e) {
			System.out.println(e.getMessage());
			e.printStackTrace();
			System.out.println(Thread.currentThread().getName() + ": " + " 发生异常 ");
		} finally {
			// BUG FIX: clear the in-progress flag even when extraction fails.
			// Previously this ran only on the success path, so any exception left
			// the site marked as "extracting" forever and it was never retried.
			Application.extractState.put(site, false);
		}
	}

	/** Persists a single extracted page through the site's pipeline. */
	private void saveResult(String site, RawPage rawPage) {
		processPage(createPipeline(site), rawPage);
	}

	/** Persists a batch of extracted pages through one shared pipeline instance. */
	private void saveResult(String site, List<RawPage> results) {
		ModelPipeline pl = createPipeline(site);
		for (RawPage rawPage : results) {
			processPage(pl, rawPage);
		}
	}

	/** Builds a ModelPipeline wired to the site's Spring bean ("&lt;site&gt;_pipeline"). */
	private ModelPipeline createPipeline(String site) {
		ModelPipeline pl = new ModelPipeline();
		try {
			pl.put(pageModel, (PageModelPipeline) AppContext.appContext.getBean(site + "_pipeline"));
		} catch (BeansException e) {
			// Missing "<site>_pipeline" bean: pages are never marked stored and
			// therefore fall through to the error-page table below.
			e.printStackTrace();
		}
		return pl;
	}

	/**
	 * Runs one page's result items through the pipeline, marks the page stored
	 * when it produced results (presumably what isAllResultSkip reports — TODO
	 * confirm against extension.RawPage), and routes failures to the error table.
	 */
	private void processPage(ModelPipeline pl, RawPage rawPage) {
		if (!rawPage.getPage().getResultItems().isSkip()) {
			pl.process(rawPage.getPage().getResultItems(), null);
		}
		if (!rawPage.getPage().isAllResultSkip(
				modelName.toArray(new String[modelName.size()]))) {
			rawPage.setStored(true);
		}
		if (!rawPage.isExtracted() || !rawPage.isStored())
			pageErrorOutPut.returnErrorPage(rawPage,
					"May Caused By Model Problem! Or Page Error!");
	}

	/** Records the new extraction cursor for the site. */
	private void updateLastId(String site, int currentId) {
		recordDao.updateRecord(site, currentId);
	}

	/** Fetches the next batch of raw pages after {@code lastId} for the site. */
	private List<RawPage> getPages(String site, int lastId) {
		return pagedao.getDetailPages(site + "_html_detail", lastId, BATCH_EXTRACT_SIZE);
	}

	/**
	 * Returns the site's extraction cursor, initialising it to one below the
	 * smallest page id when no record exists yet.
	 * NOTE(review): if the detail table is empty, getMinId may return null and
	 * the unboxing below would NPE — confirm against PageDao.
	 */
	private int getLastId(String site) {
		Integer lastRecord = recordDao.getLastRecord(site);
		if (lastRecord == null) {
			lastRecord = pagedao.getMinId(site + "_html_detail") - 1;
			recordDao.insertRecord(site, lastRecord);
		}
		return lastRecord;
	}
}

