package com.sentiment.crawler;

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.json.JSONObject;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.sentiment.config.ConfigCrawler;
import com.sentiment.tools.Generator;
import com.sentiment.webcollector.crawler.DeepCrawler;
import com.sentiment.webcollector.model.Links;
import com.sentiment.webcollector.model.Page;

/**
 * Crawler that harvests Baidu's "related searches" suggestions for a seed
 * keyword. Each suggestion is stored as a {@link JSONObject} with a
 * {@code "word"} string and a randomly generated {@code "weight"} in
 * [0.3, 0.8); results accumulate in {@link #getList()}.
 */
public class BaiduRelativeCrawler extends DeepCrawler {
	private static final Logger LOG = LoggerFactory.getLogger(BaiduRelativeCrawler.class);

	// Collected related-search keywords, one JSON object per anchor text.
	private final List<JSONObject> list = new ArrayList<JSONObject>();

	/**
	 * Creates a crawler whose state is persisted under the given crawl path.
	 *
	 * @param crawlPath directory used by the underlying {@link DeepCrawler}
	 */
	public BaiduRelativeCrawler(String crawlPath) {
		super(crawlPath);
	}

	/**
	 * Returns the keywords collected so far (live list, not a copy).
	 */
	public List<JSONObject> getList() {
		return list;
	}

	/**
	 * Extracts the related-search block (element with id {@code "rs"}) from the
	 * fetched page and records each anchor's text with a random weight. Pages
	 * without such a block are skipped instead of throwing a
	 * {@link NullPointerException}.
	 *
	 * @param page the fetched page
	 * @return always {@code null} — this crawler follows no further links
	 */
	@Override
	public Links visitAndGetNextLinks(Page page) {
		Document doc = page.getDoc();
		Element rs = doc.getElementById("rs");
		if (rs == null) {
			// Not every Baidu result page carries a related-search section.
			LOG.warn("no related-search block (id=\"rs\") found on page; skipping");
			return null;
		}
		for (Element anchor : rs.getElementsByTag("a")) {
			JSONObject json = new JSONObject();
			json.put("word", anchor.text());
			// Randomly generated weight value
			json.put("weight", Generator.genenrateDouble(0.3, 0.8));
			list.add(json);
		}
		return null;
	}

	/**
	 * Seeds the crawler with the Baidu search URL for the given keyword
	 * (URL-encoded as UTF-8) and starts a depth-1 crawl.
	 *
	 * @param keyword search term to query
	 */
	public void getSearch(String keyword) {
		try {
			addSeed(ConfigCrawler.baiduCrw + URLEncoder.encode(keyword, StandardCharsets.UTF_8.name()));
			start(1);
		} catch (Exception e) {
			// Pass the throwable so SLF4J records the full stack trace,
			// not just e.toString().
			LOG.error("failed to crawl related keywords for '{}'", keyword, e);
		}
	}

}
