package cn.edu.hfut.dmic.webcollector.crawler;

import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import cn.edu.hfut.dmic.webcollector.fetcher.CustomBehavior;
import cn.edu.hfut.dmic.webcollector.fetcher.DbUpdater;
import cn.edu.hfut.dmic.webcollector.fetcher.Fetcher;
import cn.edu.hfut.dmic.webcollector.fetcher.VisitorFactory;
import cn.edu.hfut.dmic.webcollector.generator.Injector;
import cn.edu.hfut.dmic.webcollector.generator.StandardGenerator;
import cn.edu.hfut.dmic.webcollector.net.HttpRequester;
import cn.edu.hfut.dmic.webcollector.net.HttpRequesterImpl;
import cn.edu.hfut.dmic.webcollector.net.Proxys;
import cn.edu.hfut.dmic.webcollector.util.FileUtils;

import com.model.rule.FileRule;
import com.model.xmlElement.DataColumns;
import com.model.xmlElement.DataColumnsSingleton;
import com.model.xmlElement.input.Target;
import com.sleepycat.je.Environment;
import com.sleepycat.je.EnvironmentConfig;
import com.tool.UUIDUtil;


/**
 * Base class for crawlers: manages the Berkeley DB crawl database, seed
 * injection, and the depth-by-depth fetch loop. Subclasses provide the
 * visiting behavior via {@link VisitorFactory} / {@link CustomBehavior}.
 */
public abstract class Crawler implements VisitorFactory, CustomBehavior {

    public static final Logger LOG = LoggerFactory.getLogger(Crawler.class);

    // Crawl lifecycle state (RUNNING/STOPED). Volatile because stop() is
    // typically invoked from a different thread than the one running start().
    protected volatile int status;
    protected int retry = 3;
    public final static int RUNNING = 1;
    public final static int STOPED = 2;
    protected boolean resumable = false;
    protected int threads;
    protected Set<String> seeds = new HashSet<String>();
    protected Set<String> lostSeeds = new HashSet<String>();
    protected Set<String> forcedSeeds = new HashSet<String>();
    protected Fetcher fetcher;

    protected VisitorFactory visitorFactory = this;
    protected HttpRequester httpRequester = new HttpRequesterImpl();
    String crawlPath;

    Environment env;

    private DataColumns dataColumns;        // XML configuration
    private StringBuffer data;              // accumulated crawl result data
    private StringBuffer seedsData;         // all seeds collected so far
    private AtomicLong totalSize;           // number of result records
    private AtomicLong totalBytesLength;    // byte length of the result data
    private FileRule fileRule;              // file-writing rule
    // Flag signalling that seed crawling has finished; may be set from a
    // different thread via setOverCrawlerSeed, hence volatile.
    private volatile boolean overCrawlerSeed;

    /**
     * Creates a crawler that stores its crawl database under the given path.
     *
     * @param crawlPath directory used for the Berkeley DB crawl database
     */
    public Crawler(String crawlPath) {
        this.crawlPath = crawlPath;
    }

    /**
     * Creates a crawler configured from an XML file. The thread count is
     * taken from the XML target configuration.
     *
     * @param crawlPath   directory used for the Berkeley DB crawl database
     * @param xmlFileName path of the XML configuration file
     */
    public Crawler(String crawlPath, String xmlFileName) {
        this.crawlPath = crawlPath;
        this.data = new StringBuffer();
        this.seedsData = new StringBuffer();
        this.totalSize = new AtomicLong();
        this.totalBytesLength = new AtomicLong();
        initDataColumns(xmlFileName);
        int threadsNumber = dataColumns.getInput().getTarget().getThreadsNumber();
        this.setThreads(threadsNumber);
    }

    /**
     * Loads the XML configuration and stamps the target with a fresh task
     * id, timestamp and user.
     *
     * @param xmlFileName path of the XML configuration file
     */
    private void initDataColumns(String xmlFileName) {
        dataColumns = DataColumnsSingleton.getSingle(new File(xmlFileName));
        Target target = dataColumns.getInput().getTarget();
        target.setTaskId(UUIDUtil.createUUID());
        // Local SimpleDateFormat instance: used once, on this thread only.
        target.setTaskTime(new SimpleDateFormat("yyyyMMddHHmmss").format(new Date()));
        target.setTaskUser(dataColumns.getOutput().getOutputFile().getUser());
        dataColumns.getInput().setTarget(target);
    }

    /**
     * Injects the configured seeds into the crawl database.
     *
     * @throws Exception if the injection fails
     */
    public void inject() throws Exception {
        Injector injector = new Injector(env);
        injector.inject(seeds);
    }

    /**
     * Injects the forced seeds into the crawl database (these are injected
     * on every start, overwriting any matching history entries).
     *
     * @throws Exception if the injection fails
     */
    public void injectForcedSeeds() throws Exception {
        Injector injector = new Injector(env);
        injector.inject(forcedSeeds);
    }

    /**
     * Runs the crawl for up to {@code depth} rounds. For a non-resumable
     * crawl the database directory is wiped first and at least one seed is
     * required; for a resumable crawl seeds are only injected the first time.
     *
     * @param depth maximum number of crawl depths (rounds)
     * @throws Exception if seed injection or fetching fails
     */
    public void start(int depth) throws Exception {
        File dir = new File(crawlPath);
        boolean needInject = true;

        if (resumable && dir.exists()) {
            // Resuming an existing crawl: seeds were injected previously.
            needInject = false;
        }
        if (resumable && !dir.exists()) {
            createCrawlDir(dir);
        }
        if (!resumable) {
            // Fresh crawl: wipe any previous database.
            if (dir.exists()) {
                FileUtils.deleteDir(dir);
            }
            createCrawlDir(dir);

            if (seeds.isEmpty() && forcedSeeds.isEmpty()) {
                // FIX: an error condition was previously logged at INFO level.
                LOG.error("error:Please add at least one seed");
                return;
            }
        }
        EnvironmentConfig environmentConfig = new EnvironmentConfig();
        environmentConfig.setAllowCreate(true);
        env = new Environment(dir, environmentConfig);
        try {
            if (needInject) {
                inject();
            }

            if (!forcedSeeds.isEmpty()) {
                injectForcedSeeds();
            }

            status = RUNNING;
            for (int i = 0; i < depth; i++) {
                if (status == STOPED) {
                    // FIX: honor stop() unconditionally; previously this check
                    // was skipped whenever overCrawlerSeed was set.
                    break;
                }
                if (overCrawlerSeed) {
                    // Seed crawling reported finished: skip this depth but keep
                    // polling in case the flag is cleared again (preserves the
                    // original per-iteration check).
                    continue;
                }
                LOG.info("starting depth {}", i + 1);

                StandardGenerator generator = new StandardGenerator(env);
                fetcher = new Fetcher();
                fetcher.setRetry(retry);
                fetcher.setHttpRequester(httpRequester);
                fetcher.setDbUpdater(new DbUpdater(env));
                fetcher.setVisitorFactory(visitorFactory);
                fetcher.setThreads(threads);
                fetcher.fetchAll(generator, (i + 1));
            }
        } finally {
            // FIX: always release the Berkeley DB environment, even when
            // injection or fetching throws; previously a failure leaked it.
            env.close();
        }
    }

    /**
     * Creates the crawl directory, failing loudly instead of letting a
     * silent mkdirs() failure surface later as an obscure database error.
     */
    private static void createCrawlDir(File dir) {
        if (!dir.mkdirs() && !dir.isDirectory()) {
            throw new IllegalStateException("unable to create crawl dir: " + dir);
        }
    }

    /**
     * Requests the crawl to stop: the depth loop exits before the next round
     * and the current fetcher (if any) is told to stop.
     */
    public void stop() {
        status = STOPED;
        // FIX: fetcher is null until the first depth starts; guard against
        // an NPE when stop() is called early.
        if (fetcher != null) {
            fetcher.stop();
        }
    }

    public VisitorFactory getVisitorFactory() {
        return visitorFactory;
    }

    public void setVisitorFactory(VisitorFactory visitorFactory) {
        this.visitorFactory = visitorFactory;
    }

    public HttpRequester getHttpRequester() {
        return httpRequester;
    }

    public void setHttpRequester(HttpRequester httpRequester) {
        this.httpRequester = httpRequester;
    }

    /**
     * Adds a seed URL (for a resumable crawl, seeds are only injected on
     * the first run).
     *
     * @param seed the seed URL
     */
    public void addSeed(String seed) {
        seeds.add(seed);
    }

    /**
     * Adds a forced seed URL (for a resumable crawl, forced seeds are
     * injected on every start; a matching URL in the crawl history is
     * overwritten).
     *
     * @param seed the seed URL
     */
    public void addForcedSeed(String seed) {
        forcedSeeds.add(seed);
    }

    public Set<String> getSeeds() {
        return seeds;
    }

    public void setSeeds(Set<String> seeds) {
        this.seeds = seeds;
    }

    public Set<String> getForcedSeeds() {
        return forcedSeeds;
    }

    public void setForcedSeeds(Set<String> forcedSeeds) {
        this.forcedSeeds = forcedSeeds;
    }

    public boolean isResumable() {
        return resumable;
    }

    public void setResumable(boolean resumable) {
        this.resumable = resumable;
    }

    public int getThreads() {
        return threads;
    }

    public void setThreads(int threads) {
        this.threads = threads;
    }

    public Proxys getProxys() {
        return httpRequester.getProxys();
    }

    public void setProxys(Proxys proxys) {
        httpRequester.setProxys(proxys);
    }

    public int getRetry() {
        return retry;
    }

    public void setRetry(int retry) {
        this.retry = retry;
    }

    public Fetcher getFetcher() {
        return fetcher;
    }

    public void setFetcher(Fetcher fetcher) {
        this.fetcher = fetcher;
    }

    public Set<String> getLostSeeds() {
        return lostSeeds;
    }

    public void setLostSeeds(Set<String> lostSeeds) {
        this.lostSeeds = lostSeeds;
    }

    public AtomicLong getTotalSize() {
        return totalSize;
    }

    public void setTotalSize(AtomicLong totalSize) {
        this.totalSize = totalSize;
    }

    public AtomicLong getTotalBytesLength() {
        return totalBytesLength;
    }

    public void setTotalBytesLength(AtomicLong totalBytesLength) {
        this.totalBytesLength = totalBytesLength;
    }

    public DataColumns getDataColumns() {
        return dataColumns;
    }

    public void setDataColumns(DataColumns dataColumns) {
        this.dataColumns = dataColumns;
    }

    public StringBuffer getData() {
        return data;
    }

    public void setData(StringBuffer data) {
        this.data = data;
    }

    public FileRule getFileRule() {
        return fileRule;
    }

    public void setFileRule(FileRule fileRule) {
        this.fileRule = fileRule;
    }

    public StringBuffer getSeedsData() {
        return seedsData;
    }

    public void setSeedsData(StringBuffer seedsData) {
        this.seedsData = seedsData;
    }

    public boolean isOverCrawlerSeed() {
        return overCrawlerSeed;
    }

    public void setOverCrawlerSeed(boolean overCrawlerSeed) {
        this.overCrawlerSeed = overCrawlerSeed;
    }

}
