package com.terren.spider.core.html.scheme.impl;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.commons.lang3.StringUtils;

import com.terren.spider.entity.biz.ArticleSearch;
import com.terren.spider.entity.config.SysConfig;
import com.terren.spider.entity.core.Entry;

import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.selector.Html;

public class BaiduNewsScheme extends BasicCustomScheme {

	/**
	 * Extracts article summaries (title, news source, publish time, URL) from a
	 * Baidu News search-result page.
	 *
	 * @param page  the downloaded search-result page
	 * @param entry crawl entry carrying the sourceId/taskId stamped onto each result
	 * @return articles found on this page that have a resolvable link; never null, may be empty
	 */
	@Override
	public List<ArticleSearch> findTitleInfo(Page page, Entry entry) {
		List<ArticleSearch> articleList = new ArrayList<>();
		Html pageHtml = page.getHtml();
		// Result items on the search page; each carries a numeric DOM id used below.
		List<String> partList = pageHtml.xpath("//div[@class='result']").all();
		// Current page number highlighted in the pager; absent on the first page.
		String pageNumStr = pageHtml.xpath("//*[@id='page']/strong/span[2]/text()").get();
		int pageNum = 1; // default to page 1
		if (StringUtils.isNotBlank(pageNumStr)) {
			try {
				pageNum = Integer.parseInt(pageNumStr.trim());
			} catch (NumberFormatException ignored) {
				// Pager text was not numeric — keep the page-1 default instead of
				// letting the exception abort processing of the whole page.
			}
		}

		for (int i = 1; i <= partList.size(); i++) {
			ArticleSearch temp = new ArticleSearch();
			// Baidu assigns each result a page-global DOM id: (page-1)*pageSize + index.
			Integer id = (pageNum - 1) * SysConfig.RECORD_SIZE + i;
			temp.setTitle(pageHtml.xpath("//*[@id='" + id + "']/h3/a/text()").get());
			// "source  time" line; its DOM position varies, so try both known layouts.
			String authorClass = pageHtml.xpath("//*[@id='" + id + "']/div/p/text()").get();
			if (authorClass == null) {
				authorClass = pageHtml.xpath("//*[@id='" + id + "']/div/div[2]/p/text()").get();
			}
			// Source and publish time are separated by a double space.
			temp.setNewsSource(StringUtils.substringBefore(authorClass, "  "));
			temp.setPubTimeStr(StringUtils.substringAfter(authorClass, "  "));
			if (StringUtils.isEmpty(temp.getPubTimeStr())) {
				// No separator found: treat the whole line as the publish time.
				temp.setNewsSource("");
				temp.setPubTimeStr(authorClass);
			}
			String url = pageHtml.xpath("//*[@id='" + id + "']/h3/a").links().get();
			temp.setUrl(url);
			temp.setSourceId(entry.getSourceId());
			temp.setTaskId(entry.getTaskId());
			// Skip entries without a resolvable article link.
			if (StringUtils.isNotEmpty(url)) {
				articleList.add(temp);
			}
		}
		return articleList;
	}

	/**
	 * Collects pagination URLs from the search page's pager block ({@code #page}).
	 * If a prev/next marker ({@code rsv_page=-1} / {@code rsv_page=1}) is present,
	 * only the "next page" link is kept; otherwise every pager link is returned.
	 *
	 * @param page  the downloaded search-result page
	 * @param entry crawl entry (unused here, required by the scheme interface)
	 * @return de-duplicated set of pagination URLs; never null, may be empty
	 */
	@Override
	public Set<String> findHelpPaginationUrl(Page page, Entry entry) {
		Set<String> resultSet = new HashSet<>();
		boolean hasPrevOrNext = false;
		List<String> pageUrls = page.getHtml().$("#page").links().all();
		for (String pageUrl : pageUrls) {
			if (pageUrl.contains("rsv_page=1")) {
				// "next page" link
				resultSet.add(pageUrl);
			}
			if (pageUrl.contains("rsv_page=-1") || pageUrl.contains("rsv_page=1")) {
				hasPrevOrNext = true;
			}
		}
		if (!hasPrevOrNext) {
			// No prev/next markers at all: take every pager link.
			resultSet.addAll(pageUrls);
		}
		return resultSet;
	}

}
