package com.bigdata.collector.labels;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.log4j.Logger;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import com.bigdata.collector.model.Page;
import com.bigdata.collector.util.HttpUtils;

/**
 * Collector that harvests category labels from xuewen.cnki.net: it reads the
 * category links from the seed page, then pages through each "ChannelList"
 * listing and persists every entry title via {@code tagsDao}.
 */
public class Collector4Cnki extends AbstractCollector {

	private static final Logger logger = Logger.getLogger(Collector4Cnki.class);

	/** Base URL the relative hrefs from the seed page are resolved against. */
	private static final String BASE_URL = "http://xuewen.cnki.net/";

	public Collector4Cnki(String seed) {
		super(seed);
	}

	public static void main(String[] args) {
		String seed = "http://xuewen.cnki.net/finance.html";
		Collector4Cnki ct = new Collector4Cnki(seed);

		Map<String, String> urls = ct.getURLs(seed);
		for (Map.Entry<String, String> entry : urls.entrySet()) {
			ct.crawler(entry);
		}
		// store/invalid/duplicate are static counters inherited from AbstractCollector.
		logger.info("store=" + store + ",invalid=" + invalid + ",duplicate="
				+ duplicate);
	}

	/**
	 * Fetches the seed page and extracts the category links from the element
	 * with id {@code classification}.
	 *
	 * @param seed URL of the page to fetch
	 * @return map of href -&gt; anchor text; empty on any failure
	 */
	public Map<String, String> getURLs(String seed) {
		Map<String, String> map = new HashMap<String, String>();
		try {
			Page page = HttpUtils.fetchHttpResponse(seed);
			String content = new String(page.content, StandardCharsets.UTF_8);
			Document doc = Jsoup.parse(content);

			Element e = doc.getElementById("classification");
			if (e == null) {
				// Page layout changed or fetch returned an error page.
				logger.warn("no #classification element on " + seed);
				return map;
			}
			for (Element ae : e.select("a")) {
				map.put(ae.attr("href").trim(), ae.text().trim());
			}
		} catch (Exception ex) {
			logger.error("failed to extract URLs from " + seed, ex);
		}
		return map;
	}

	/**
	 * Saves the category label itself, then — for "ChannelList" pages only —
	 * walks the paginated listing and saves each entry title until a page
	 * with no results is reached.
	 *
	 * @param entry href -&gt; label pair produced by {@link #getURLs(String)}
	 */
	public void crawler(Entry<String, String> entry) {
		try {
			String category = "0";
			String url = BASE_URL + entry.getKey();
			save(entry.getValue(), category);
			if (!url.contains("ChannelList")) {
				return;
			}
			for (int pageNum = 1; ; pageNum++) {
				String path = url + "&page=" + pageNum;

				Page page = HttpUtils.fetchHttpResponse(path);
				if (page == null || page.content == null) {
					break;
				}
				String content = new String(page.content, StandardCharsets.UTF_8);
				Document doc = Jsoup.parse(content);
				Elements es = doc.getElementsByAttributeValue("class", "title");
				if (es.isEmpty()) {
					break; // past the last page of results
				}
				logger.info(path);
				for (Element e : es) {
					Element link = e.getElementsByTag("a").first();
					if (link == null) {
						continue; // title block without an anchor — skip it
					}
					String t = link.text().trim();
					logger.info(t);
					save(t, category);
				}
			}
		} catch (Exception ex) {
			logger.error("crawl failed for " + entry.getKey(), ex);
		}
	}

	/**
	 * Persists a single tag and bumps the inherited counters according to
	 * the DAO result code (0 = invalid, 1 = stored, 2 = duplicate).
	 *
	 * @param t        tag text to store
	 * @param category category id the tag belongs to
	 */
	public void save(String t, String category) {
		int r = tagsDao.singleSave(t, category);
		switch (r) {
		case 0:
			invalid++;
			break;
		case 1:
			store++;
			break;
		case 2:
			duplicate++;
			break;
		default:
			// Unknown result code — deliberately ignored, matching prior behavior.
			break;
		}
	}

	@Override
	public void crawler(String url) {
		// Not used by this collector; entry point is crawler(Entry).
	}

	@Override
	boolean extractTags(Document doc) {
		// Not used by this collector; extraction happens inline in crawler(Entry).
		return false;
	}

	@Override
	public void extractURLs(Document doc) {
		// Not used by this collector; see getURLs(String).
	}
}
