package com.bigdata.collector.labels;

import java.nio.charset.StandardCharsets;

import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.log4j.Logger;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import com.bigdata.collector.model.Page;
import com.bigdata.collector.util.HttpUtils;

/**
 * Collector for laohucaijing.com: fetches the tags index page and stores
 * each tag label found inside the {@code #area1} container via
 * {@link #save2DB}.
 */
public class Collector4Laohucaijing extends AbstractCollector {

	// Class-scoped logger; final so it cannot be reassigned.
	private static final Logger logger = Logger
			.getLogger(Collector4Laohucaijing.class);

	/**
	 * Creates a collector seeded with a start URL.
	 *
	 * @param seed start URL handed to the base collector
	 */
	public Collector4Laohucaijing(String seed) {
		super(seed);
	}

	public static void main(String[] args) {
		String seed = "http://www.laohucaijing.com/tags";
		AbstractCollector lc = new Collector4Laohucaijing(seed);
		lc.run();
	}

	/**
	 * Fetches {@code url}, decodes the response body as UTF-8, parses it
	 * and extracts tag labels. Any failure is logged and swallowed so one
	 * bad page does not abort the whole run.
	 *
	 * @param url page to fetch and process
	 */
	@Override
	public void crawler(String url) {
		try {
			Page page = HttpUtils.fetchHttpResponse(url);
			// StandardCharsets.UTF_8 avoids the checked
			// UnsupportedEncodingException of the charset-name overload.
			Document doc = Jsoup.parse(new String(page.content, StandardCharsets.UTF_8));
			extractTags(doc);
		} catch (Exception e) {
			// Pass the throwable itself so log4j renders the full stack trace.
			logger.error("failed to crawl " + url, e);
		}
	}

	/**
	 * Saves the text of every non-blank anchor found under the element
	 * with id {@code area1}.
	 *
	 * @param doc parsed page
	 * @return true when the {@code #area1} container was found,
	 *         false when it is missing (layout change or error page)
	 */
	@Override
	boolean extractTags(Document doc) {
		// getElementById returns null when the element is absent; the
		// original code would NPE here on an unexpected page layout.
		Element area = doc.getElementById("area1");
		if (area == null) {
			logger.warn("element #area1 not found, skipping tag extraction");
			return false;
		}
		String category = "0";
		for (Element e : area.getElementsByTag("a")) {
			String t = e.text().trim();
			if (!t.isEmpty()) {
				save2DB(t, category);
			}
		}
		return true;
	}

	/** No link discovery needed: only the seed page is processed. */
	@Override
	public void extractURLs(Document doc) {
		// intentionally empty
	}
}
