package com.sentiment.crawler;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.log4j.PropertyConfigurator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.sentiment.config.Config;
import com.sentiment.config.ConfigCrawler;
import com.sentiment.contentextractor.ContentExtractor;
import com.sentiment.contentextractor.News;
import com.sentiment.database.bean.CrawlNews;
import com.sentiment.nlpalgo.bean.Double2;
import com.sentiment.nlpalgo.bean.Keywords;
import com.sentiment.tools.Format;
import com.sentiment.tools.Generator;
import com.sentiment.webcollector.crawler.DeepCrawler;
import com.sentiment.webcollector.model.Links;
import com.sentiment.webcollector.model.Page;

/**
 * Crawler for the Baidu News home page. (Measured: the page exposes ~961
 * links when rendered dynamically vs. ~915 statically; this crawler only
 * follows the links inside the hot-headline areas.)
 *
 * <p>It supports the daily sentiment pipeline: instead of querying a search
 * engine directly, it picks the highlighted stories from the Baidu News
 * portal, records each as a {@link CrawlNews}, which is later used to drive
 * search-engine crawls for related coverage.
 *
 * @author 王骏科
 */
public class DailyNewsCrawler extends DeepCrawler {
	private static final Logger LOG = LoggerFactory.getLogger(DailyNewsCrawler.class);

	/** News collected during the crawl; populated by {@link #visitAndGetNextLinks(Page)}. */
	private final List<CrawlNews> newsList = new ArrayList<CrawlNews>();

	/** @return the news records collected so far (live list, not a copy) */
	public List<CrawlNews> getNewslist() {
		return newsList;
	}

	/**
	 * @param crawlPath directory where the underlying {@link DeepCrawler}
	 *                  keeps its crawl state
	 */
	public DailyNewsCrawler(String crawlPath) {
		super(crawlPath);
	}

	/**
	 * Visits one page. For the Baidu News seed page, returns the links found
	 * in the headline blocks (CSS classes {@code hdline0/2/4}); for every
	 * other page, extracts the article into a {@link CrawlNews} and appends
	 * it to {@link #newsList}, returning no further links.
	 */
	@Override
	public Links visitAndGetNextLinks(Page page) {
		String url = page.getUrl();
		Links links = new Links();
		if (url.matches(ConfigCrawler.baiduNews)) {
			// Seed page: collect links from the headline areas whose CSS
			// classes are hdline0, hdline2 and hdline4.
			for (int i = 0; i < 5; i += 2) {
				links.addAllFromDocument(page.getDoc(), ".hdline" + i);
			}
		} else {
			// Article page: convert the extracted News into a CrawlNews.
			CrawlNews cn = new CrawlNews();
			try {
				News n = ContentExtractor.getNewsByHtml(page.getHtml());
				cn.setDate(new Date());
				cn.setTitle(n.getTitle());
				cn.setText(CrawlerUtils.crawlText(n.getContentElement()));
				cn.setUrl(page.getUrl());
				cn.setPublisher(CrawlerUtils.publisherFilter(page.getUrl()));
				// BUG FIX: the original used `n.getTime() != ""`, a reference
				// comparison that is true even for empty strings; compare content.
				if (n.getTime() != null && !n.getTime().isEmpty()) {
					cn.setReleaseTime(Format.string2Date(n.getTime(), 6));
				}

				if (!Config.useNlp) {
					// NLP pipeline disabled: fill keywords, counters and the
					// sentiment distribution with generated placeholder values.
					List<Keywords> keywords = new ArrayList<Keywords>();
					for (int i = 0; i < 5; i++) {
						Keywords words = new Keywords();
						words.setWord(Generator.generateKeyword());
						words.setWeight(Generator.genenrateDouble(0, 1));
						keywords.add(words);
					}
					cn.setKeywords(keywords);
					cn.setComment(Generator.generateInt(10, 100));
					cn.setReprint(Generator.generateInt(10, 100));

					// Two-component sentiment distribution that sums to 1.
					List<Double2> list = new ArrayList<Double2>();
					double d = Generator.genenrateDouble(0, 1);
					list.add(new Double2(d, 1 - d));
					cn.setSentiment(list);
				}
			} catch (Exception e) {
				// FIX: pass the Throwable so the stack trace is logged;
				// e.toString() alone loses it.
				LOG.error("Failed to extract news from " + url, e);
			}
			// Kept outside the try on purpose: a partially-filled record is
			// still added on failure, matching the original behavior.
			newsList.add(cn);
		}
		return links;
	}

	/** Seeds the Baidu News home page and runs a depth-2 crawl. */
	public void getSearch() {
		try {
			addSeed(ConfigCrawler.baiduNews);
			start(2);
		} catch (Exception e) {
			LOG.error("Daily news crawl failed", e);
		}
	}

	public static void main(String[] args) {
		PropertyConfigurator.configure(Config.log4jPropertiesPath);
		DailyNewsCrawler crawler = new DailyNewsCrawler(Config.crawlerDataPath);
		crawler.getSearch();
		List<CrawlNews> list = crawler.getNewslist();
		System.out.println(list);
	}
}
