package com.bigdata.collector.labels;

import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Queue;
import java.util.Set;

import org.apache.log4j.Logger;
import org.jsoup.nodes.Document;

import com.bigdata.collector.dao.LabelsDAO;

/**
 * Base class for label collectors. Maintains a FIFO queue of candidate URLs
 * and a set of already-crawled URLs; subclasses implement the actual page
 * fetching ({@link #crawler(String)}) and extraction logic.
 *
 * <p>NOTE(review): the counters below are static and unsynchronized, so they
 * are shared (best-effort) across all collector instances — confirm whether
 * multiple collector threads ever run concurrently.
 */
public abstract class AbstractCollector implements Runnable {

	private static final Logger logger = Logger.getLogger(AbstractCollector.class);
	/** Count of labels successfully stored (DAO result code 1). */
	static int store = 0;
	/** Count of labels rejected as invalid (DAO result code 0). */
	static int invalid = 0;
	/** Count of labels skipped as duplicates (DAO result code 2). */
	static int duplicate = 0;
	LabelsDAO tagsDao;
	/** URLs waiting to be crawled, in FIFO order. */
	Queue<String> candidate = new LinkedList<String>();
	/** URLs already crawled. */
	Set<String> crawled;

	public AbstractCollector(String seed) {
		crawled = new HashSet<String>();
		candidate.add(seed);
	}

	abstract public void crawler(String url);

	abstract boolean extractTags(Document doc);

	abstract public void extractURLs(Document doc);

	/**
	 * Persists one label/category pair through {@link LabelsDAO} and updates
	 * the shared counters based on the DAO result code
	 * (0 = invalid, 1 = stored, 2 = duplicate).
	 *
	 * @param t        the label text to save
	 * @param category the category the label belongs to
	 */
	public void save2DB(String t, String category) {
		// Renamed from "tagsDao" — the original local shadowed the instance
		// field of the same name. A fresh DAO per call is kept intentionally;
		// the field is (so far) unused by this class.
		LabelsDAO dao = new LabelsDAO();
		int r = dao.singleSave(t, category);
		if (r == 0)
			invalid++;
		if (r == 1)
			store++;
		if (r == 2)
			duplicate++;
	}

	/**
	 * Appends (or overwrites) one "label|category" line to
	 * {@code ./store/labels/<fileName>.txt}, encoded as UTF-8.
	 *
	 * @param fileName base name of the target file (no extension)
	 * @param label    label text to write
	 * @param category category text to write
	 * @param append   true to append to an existing file, false to truncate
	 */
	public void save2File(String fileName, String label, String category,
			boolean append) {
		// try-with-resources closes the writer even if write() throws;
		// the original leaked the stream on failure. UTF-8 is pinned so the
		// output does not depend on the platform default charset.
		try (BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
				new FileOutputStream("./store/labels/" + fileName + ".txt",
						append), StandardCharsets.UTF_8))) {
			bw.write(label + "|" + category + "\n");
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Drains the candidate queue, crawling each URL exactly once, then logs
	 * the final counters. Terminates when the queue is empty.
	 */
	@Override
	public void run() {
		String url;
		while ((url = candidate.poll()) != null) {
			// add() returns false when the URL was already crawled; the
			// original inserted the URL (including a terminal null) before
			// the null check and never de-duplicated.
			if (!crawled.add(url)) {
				continue;
			}
			crawler(url);
			logger.info("candidate=" + candidate.size());
			logger.info("crawled=" + crawled.size());
		}
		logger.info("store=" + store + ",invalid=" + invalid + ",duplicate="
				+ duplicate);
	}
}
