package com.gxljc.bear.crawler.itaogao.mrjjxw;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import com.esotericsoftware.minlog.Log;
import com.mongodb.BasicDBObject;
import com.mongodb.Bytes;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.gxljc.commons.mongo.cli.BaseMongoRepository;
import com.gxljc.commons.util.StringUtil;
import com.gxljc.bear.crawler.base.BaseMongoTools;
import com.gxljc.bear.crawler.base.DolphinCrawlerConf;
import com.gxljc.bear.crawler.base.DolphinCrawlerConsts;
import com.gxljc.bear.crawler.itaogao.ItaogaoConst;
import com.gxljc.bear.crawler.itaogao.NewsPageTable;
//import com.gxljc.bear.crawler.itaogao.util.HbaseUtil;
import com.gxljc.bear.crawler.itaogao.util.HttpUtil;
import com.gxljc.bear.crawler.itaogao.util.ImageUtil;
import com.gxljc.bear.crawler.itaogao.util.MongodbUtil;
import com.gxljc.bear.crawler.util.DateUtil;
import com.gxljc.bear.crawler.util.MailUtil;

/**
 * @author haitao E-mail:haitao@summba.com
 * @version createTime：2021年9月28日 上午10:06:39
 * 
 */

public class MrjjxwCrawler {
	private final static Logger LOG = Logger.getLogger(MrjjxwCrawler.class);
	/** Collection holding the crawled article details. */
	private final static String MONGONAME_COLLECTION = "t_newspaper_mrjjxw";
	/** Collection holding the detail-page seeds. NOTE(review): same name as the detail collection — confirm intentional. */
	private final static String MONGONAME_COLLECTION_SEED = "t_newspaper_mrjjxw";
	/** Accumulates the fields of the article currently being processed; reset per article. */
	private Map<String, Object> newsMap = new HashMap<String, Object>();
	/** Media name persisted with every article — must stay exactly this value. */
	private final static String SOURCE = "每日经济新闻";
	/** Number of detail seeds extracted so far (shared across instances). */
	private static int countCrawlerSeed = 0;
	/** Number of articles successfully inserted so far (shared across instances). */
	private static int countInsert = 0;
	/** Whether HTTP requests go through a proxy. */
	protected boolean proxy;
	/** Date string (yyyy-MM-dd) used by {@link #startCrawlerUpdateOne()}. */
	protected String time;

	public void setProxy(Boolean proxy) {
		this.proxy = proxy;
	}

	public void setTime(String time) {
		this.time = time;
	}

	public MrjjxwCrawler() {
		resetNewsMap();
	}

	/**
	 * Clears the per-article field map and re-seeds the constant fields.
	 * FIX: the original filled the map only once in the constructor, so fields of a
	 * previous article (images, author, content, ...) leaked into the next one.
	 */
	private void resetNewsMap() {
		newsMap.clear();
		newsMap.put("status", 0);
		newsMap.put("mediaName", SOURCE);
	}

	/**
	 * Crawler entry point: full history, from today back to January 1st.
	 */
	public void startCrawlerAll() {
		// One seed URL per publication date.
		List<String> seeds = getTimeSeed();
		for (String seed : seeds) {
			// Skip issues whose page is not reachable.
			if (!HttpUtil.checkUrlexists(seed)) {
				continue;
			}
			// Extract that day's detail-page seeds.
			extractDetailSeed(seed);
			if (countCrawlerSeed % 100 == 0) {
				LOG.info(String.format("Cralwer seed size is %s", countCrawlerSeed));
			}
		}
		// Fetch and store every pending detail page.
		crawlerNewsDetail();

		LOG.info(String.format("Crawler finished! Successed insert size is %s", countInsert));
	}

	/**
	 * Crawler entry point: today's issue only, followed by a mail report.
	 */
	public void startCrawlerUpdate() {
		long start = System.currentTimeMillis();
		List<String> seedToday = getTimeToday();
		for (String seed : seedToday) {
			// Skip the seed if its page is not reachable.
			if (!HttpUtil.checkUrlexists(seed)) {
				continue;
			}
			extractDetailSeed(seed);
		}
		LOG.info(String.format("Cralwer update seed size is %s", countCrawlerSeed));

		// Fetch and store every pending detail page.
		crawlerNewsDetail();
		LOG.info(String.format("Crawler update finished! Successed insert size is %s", countInsert));

		// Mail the run statistics.
		long timeUsed = System.currentTimeMillis() - start;
		mailReport(countInsert, timeUsed);
	}

	/**
	 * Crawler entry point: a single issue, whose date comes from {@link #setTime(String)}.
	 */
	public void startCrawlerUpdateOne() {
		long start = System.currentTimeMillis();
		String url = String.format(ItaogaoConst.NEWSPAPER_SEED.MRJJXW.getValue(), time);
		// Only proceed when the issue page is reachable.
		if (HttpUtil.checkUrlexists(url)) {
			extractDetailSeed(url);
		}
		LOG.info(String.format("Cralwer update seed size is %s", countCrawlerSeed));

		// Fetch and store every pending detail page.
		crawlerNewsDetail();
		LOG.info(String.format("Crawler update finished! Successed insert size is %s", countInsert));

		// Mail the run statistics.
		long timeUsed = System.currentTimeMillis() - start;
		mailReport(countInsert, timeUsed);
	}

	/**
	 * Fetches every pending seed from MongoDB, extracts the article fields and
	 * upserts them into the detail collection.
	 */
	void crawlerNewsDetail() {
		List<DBObject> seeds = getSeed();
		if (CollectionUtils.isEmpty(seeds)) {
			LOG.error("page is empty");
			return;
		}
		for (DBObject currObj : seeds) {
			String seed = ObjectUtils.toString(currObj.get("crawlUrl"));
			String id = crawlOneSeed(currObj);
			if (id != null) {
				LOG.info(String.format("insert the id/rowkey is %s ,and url is %s", id, seed));
			}
		}
	}

	/**
	 * Debug helper: re-crawls a single hard-coded seed document.
	 */
	void crawlerNewsDetailSingle() {
		BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.ITAOGAO.getName());
		DBCollection rawDataColl = repo.getCollection(MONGONAME_COLLECTION_SEED);
		DBCursor cursor = rawDataColl.find(new BasicDBObject("_id", "ce2ea90e5acebc2c6f317864ec930da7"));
		try {
			while (cursor.hasNext()) {
				String id = crawlOneSeed(cursor.next());
				if (id != null) {
					LOG.info(String.format("insert the id/rowkey is %s ", id));
				}
			}
		} finally {
			// FIX: the cursor was never closed in the original.
			cursor.close();
		}
	}

	/**
	 * Crawls one seed document: downloads the page, extracts the article fields
	 * and upserts them into the detail collection.
	 * (Extracted from the two formerly duplicated loop bodies of
	 * crawlerNewsDetail / crawlerNewsDetailSingle.)
	 *
	 * @param currObj seed document carrying crawlUrl / logoUrl / title
	 * @return the generated article id on success, or {@code null} when the page
	 *         could not be downloaded
	 */
	private String crawlOneSeed(DBObject currObj) {
		String seed = ObjectUtils.toString(currObj.get("crawlUrl"));
		String logoUrl = ObjectUtils.toString(currObj.get("logoUrl"));
		String title = ObjectUtils.toString(currObj.get("title"));
		NewsPageTable table = new NewsPageTable();
		byte[] htmlByte = null;
		try {
			htmlByte = HttpUtil.returnHtmlByte(seed, proxy);
		} catch (Exception e1) {
			// FIX: the original logged htmlByte (always null here) instead of the url.
			LOG.info(String.format("crawler html Error! url is %s", seed));
			e1.printStackTrace();
		}
		if (htmlByte == null) {
			return null;
		}
		// NOTE(review): decodes with the platform default charset — confirm the
		// site's encoding matches the JVM default on the crawl hosts.
		String html = new String(htmlByte);

		// Start from a clean field map so nothing leaks from the previous article.
		resetNewsMap();

		// Extract publish date, source, author, content and images.
		extractBaseAttr(html, seed);

		String id = ObjectUtils.toString(StringUtil.genMD5Val(seed));
		String subTitle = "";

		if (StringUtils.isNotBlank(logoUrl)) {
			try {
				// Turn a relative image path into an absolute one.
				logoUrl = ImageUtil.imageUrlSupply(seed, logoUrl);
				// Re-host the image on our platform.
				logoUrl = ImageUtil.imageConvertplatform(logoUrl);
			} catch (Exception e) {
				LOG.info(String.format("logoUrl convert failed! logoUrl is %s", logoUrl));
				e.printStackTrace();
			}
		}

		// Base fields.
		newsMap.put("_id", id);
		newsMap.put("title", title);
		newsMap.put("subTitle", subTitle);
		newsMap.put("logoUrl", logoUrl);
		newsMap.put("crawlUrl", seed);
		newsMap.put("crawlFlag", 1);
		newsMap.put("crawlDate", new Date());

		// Upsert into MongoDB.
		MongodbUtil.update2Mongodb(newsMap, MONGONAME_COLLECTION);

		table.setTitle(title);
		table.setId(id);

		// HBase write currently disabled.
		// HbaseUtil.saveHbase(table, htmlByte);
		countInsert += 1;
		return id;
	}

	/**
	 * Downloads one issue page and extracts the detail-page seeds from it.
	 *
	 * @param url issue seed URL
	 */
	public void extractDetailSeed(String url) {
		try {
			byte[] htmlbyte = HttpUtil.returnHtmlByte(url, proxy);
			// NOTE(review): equals() only matches when the whole response body is
			// exactly this "issue not found" message — confirm it should not be contains().
			if (htmlbyte != null && !new String(htmlbyte).equals("抱歉，没有找到该期报纸")) {
				extractTitleList(new String(htmlbyte));
			}
		} catch (Exception e) {
			LOG.info(String.format("HttpUtil.returnHtmlByte Error!and url is %s", url));
			e.printStackTrace();
		}
	}

	/**
	 * Extracts the article list of one issue; supports both page layouts.
	 *
	 * @param html raw issue page html
	 */
	private void extractTitleList(String html) {
		Document doc = Jsoup.parse(html);
		Elements elements = doc.select("body div.main div.main-left div ul li");
		if (elements.size() == 0) {
			// Fall back to the newer page layout.
			elements = doc.select("body div.g-main div.main-left div ul li");
		}
		for (Element element : elements) {
			countCrawlerSeed += extractUrlAndLogoUrl(element);
		}
	}

	/**
	 * Extracts crawlUrl, logoUrl and title from one list entry and upserts a seed
	 * document keyed by the MD5 of the url (so re-crawls are idempotent).
	 *
	 * @param element one article list entry
	 * @return RESULT_YES when a seed was stored, RESULT_NO otherwise
	 */
	private int extractUrlAndLogoUrl(Element element) {
		Elements elements = element.select("p a");
		// FIX: guard against entries without any link (the original threw
		// IndexOutOfBoundsException on elements.get(0)).
		if (elements.isEmpty()) {
			return DolphinCrawlerConsts.RESULT_NO;
		}
		String crawlUrl = elements.get(0).attr("href");
		String title = elements.get(0).text();
		String logoUrl = "";
		if (elements.size() > 1) {
			// FIX: guard against a second link that carries no <img>.
			Elements imgs = elements.get(1).select("img");
			if (!imgs.isEmpty()) {
				logoUrl = imgs.get(0).attr("src");
			}
		}
		if (StringUtils.isBlank(crawlUrl)) {
			return DolphinCrawlerConsts.RESULT_NO;
		}
		Map<String, Object> map = new HashMap<String, Object>();
		map.put("crawlUrl", crawlUrl);
		map.put("logoUrl", logoUrl);
		map.put("title", title);
		map.put(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_FLAG.getValue(), 0);
		map.put("_id", StringUtil.genMD5Val(crawlUrl));
		MongodbUtil.update2Mongodb(map, MONGONAME_COLLECTION_SEED);
		return DolphinCrawlerConsts.RESULT_YES;
	}

	/**
	 * Builds one seed URL per publication date, from today back to January 1st.
	 *
	 * @return seed URLs; never {@code null} (FIX: the original returned null for an
	 *         empty date list, which would NPE in {@link #startCrawlerAll()})
	 */
	public List<String> getTimeSeed() {
		List<String> urls = new LinkedList<String>();
		List<Date> dateSeed = getDateSeed();
		if (CollectionUtils.isEmpty(dateSeed)) {
			return urls;
		}
		for (Date date : dateSeed) {
			String dateFormat = DateUtil.dateFormat(date, "yyyy-MM-dd");
			urls.add(String.format(ItaogaoConst.NEWSPAPER_SEED.MRJJXW.getValue(), dateFormat));
		}
		// FIX: the original logged through minlog's Log by mistake; use the class logger.
		LOG.info(String.format("seeds size %s", urls.size()));
		return urls;
	}

	/**
	 * @return a single-element list holding today's issue seed URL
	 */
	public List<String> getTimeToday() {
		List<String> list = new LinkedList<>();
		Date today = Calendar.getInstance().getTime();
		String dateFormat = DateUtil.dateFormat(today, "yyyy-MM-dd");
		list.add(String.format(ItaogaoConst.NEWSPAPER_SEED.MRJJXW.getValue(), dateFormat));
		return list;
	}

	/**
	 * Lists publication dates from today back to January 1st, skipping Saturdays
	 * and Sundays. Today and January 1st itself are always included, regardless of
	 * the weekday.
	 */
	private List<Date> getDateSeed() {
		Calendar now = Calendar.getInstance();
		List<Date> list = new LinkedList<>();
		list.add(now.getTime());
		while (true) {
			now.add(Calendar.DATE, -1);
			int month = now.get(Calendar.MONTH);
			int day = now.get(Calendar.DAY_OF_MONTH);
			int week = now.get(Calendar.DAY_OF_WEEK);
			// Stop (inclusively) at January 1st.
			if (month == Calendar.JANUARY && day == 1) {
				list.add(now.getTime());
				return list;
			}
			// Skip weekends (the paper is not published then).
			if (week != Calendar.SATURDAY && week != Calendar.SUNDAY) {
				list.add(now.getTime());
			}
		}
	}

	/**
	 * Extracts the detail fields (publish date, source, author, content, images)
	 * from the page into {@link #newsMap}.
	 *
	 * @param html raw detail page html
	 * @param url page url, used to absolutize relative image paths
	 */
	public void extractBaseAttr(String html, String url) {
		Document doc = Jsoup.parse(html);
		// Publish date, source and author.
		extractTimeAuthorSource(doc);

		// Article body.
		extractContent(doc);

		// In-article images.
		extractImages(doc, url);
	}

	/**
	 * Extracts publish date, source and author into {@link #newsMap}. Supports
	 * both the old ("div.main") and the new ("div.g-main") page layouts.
	 *
	 * @param doc parsed detail page
	 */
	public void extractTimeAuthorSource(Document doc) {
		List<String> attrList = new ArrayList<String>();
		attrList.add("publishDate");
		attrList.add("source");
		attrList.add("author");

		// Old layout: three <li> entries, in the order date / source / author.
		Elements elements = doc.select("body div.main div.top-line ul.left li");
		if (elements.size() != 0) {
			LOG.info(elements.size());
			int count = 0;
			for (Element element : elements) {
				// FIX: guard against pages with more than three <li> entries
				// (the original overran attrList).
				if (count >= attrList.size()) {
					break;
				}
				String text = element.select("span").text();
				if (count == 0) {
					Date date = DateUtil.parseFormat(text, "yyyy-MM-dd HH:mm:ss");
					newsMap.put(attrList.get(count), date);
				} else {
					// Strip the "每经记者 " (NBD reporter) prefix.
					newsMap.put(attrList.get(count), text.replace("每经记者 ", ""));
				}
				count += 1;
			}
		} else {
			// New layout: "source丨author" inside span.source, date inside span.time.
			elements = doc.select("body div.g-main div.g-article-left div.g-article div.g-article-top p span.source");
			if (elements.size() != 0) {
				String[] parts = elements.get(0).text().split("丨");
				if (parts.length == 1) {
					newsMap.put("source", parts[0].trim());
					newsMap.put("author", "");
				} else if (parts.length == 2) {
					newsMap.put("author", parts[1].replace("每经记者 ", "").trim());
					newsMap.put("source", parts[0].trim());
				} else {
					newsMap.put("source", "");
					newsMap.put("author", "");
				}
			}
			Elements elementsTime = doc
					.select("body div.g-main div.g-article-left div.g-article div.g-article-top p span.time");
			if (elementsTime.size() != 0) {
				Date date = DateUtil.parseFormat(elementsTime.get(0).text(), "yyyy-MM-dd HH:mm:ss");
				newsMap.put("publishDate", date);
			}
		}
	}

	/**
	 * Extracts the article paragraphs and joins them into the "content" field.
	 *
	 * @param doc parsed detail page
	 */
	public void extractContent(Document doc) {
		List<String> contents = new ArrayList<String>();
		Elements elementsContent = doc.select("body div.main div.main-left div.main-left-article p");
		if (elementsContent.size() == 0) {
			elementsContent = doc.select("body div.g-main div.g-article-left div.g-article div.g-articl-text p");
		}
		for (Element element : elementsContent) {
			// Skip paragraphs containing line breaks or links (ads / related reads).
			if (element.html().contains("<br") || element.html().contains("href")) {
				continue;
			}
			contents.add(element.text());
		}
		if (CollectionUtils.isNotEmpty(contents)) {
			putListToMap(contents, "content", true, "####");
		}
	}

	/**
	 * Extracts the in-article image URLs, re-hosts them and stores them under
	 * "images" in {@link #newsMap}.
	 *
	 * @param doc parsed detail page
	 * @param url page url, used to absolutize relative image paths
	 */
	public void extractImages(Document doc, String url) {
		List<String> images = new ArrayList<String>();
		Elements elementsImages = doc.select("body div.main div.main-left div.main-left-article img");
		if (elementsImages.size() == 0) {
			elementsImages = doc.select("body div.g-main div.g-article-left div.g-article div.g-articl-text img");
		}
		for (Element element : elementsImages) {
			// FIX: the original read element.text(), which is always blank for an
			// <img> tag, so no image was ever collected; the URL lives in "src".
			String src = element.attr("src");
			if (StringUtils.isNotBlank(src)) {
				images.add(src);
			}
		}
		if (CollectionUtils.isNotEmpty(images)) {
			List<String> imagesNew = ImageUtil.ImageUrlService(images, url);
			if (CollectionUtils.isNotEmpty(imagesNew)) {
				newsMap.put("images", imagesNew);
			}
		}
	}

	/**
	 * Puts a list into {@link #newsMap}, either joined into one string or as-is.
	 *
	 * @param listObject a List&lt;String&gt; (may be null; null/empty is a no-op)
	 * @param putKey key under which to store the value
	 * @param joinFlag join the list into a single string when true
	 * @param splitMark separator used for joining
	 */
	@SuppressWarnings("unchecked")
	void putListToMap(Object listObject, String putKey, boolean joinFlag, String splitMark) {
		List<String> list = new ArrayList<String>();
		if (listObject != null) {
			list = (List<String>) listObject;
		}
		if (CollectionUtils.isEmpty(list)) {
			return;
		}
		if (joinFlag) {
			// Drop the full-width indent after each separator and a known ad snippet.
			newsMap.put(putKey, StringUtils.join(list, splitMark).replace("####　　", "####")
					.replace("AD_SURVEY_Add_AdPos(\"14213\");", ""));
		} else {
			newsMap.put(putKey, list);
		}
	}

	/**
	 * Sends a crawl-report mail. Only the "G" platform actually sends.
	 *
	 * @param count number of successfully inserted articles
	 * @param timeUsed total run time in milliseconds
	 */
	private void mailReport(int count, long timeUsed) {
		LOG.info("sending E-mail");
		String subject = "Hasky News crawler-mrjjxw";
		String platformName = DolphinCrawlerConf.getInstance().get(DolphinCrawlerConsts.PARAM_PLATFORM_NAME);
		if (!"G".equals(platformName)) {
			return;
		}
		String content = "Result:platform=%s fetching size=%s item(s) from %s with proxy=%s used total time=%s second(s)! ";
		MailUtil.sendMailMoreThanOne(subject,
				String.format(content, platformName, count, MONGONAME_COLLECTION, "" + proxy, "" + timeUsed / 1000),
				"yujunjie@summba.com", "durui@summba.com", "haitao@summba.com");
	}

	/**
	 * Loads every seed document whose crawlFlag is still 0, projecting only the
	 * fields the crawler needs.
	 *
	 * @return pending seed documents (never {@code null})
	 */
	public List<DBObject> getSeed() {
		BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.ITAOGAO.getName());
		BasicDBObject keys = new BasicDBObject();
		keys.put(DolphinCrawlerConsts.MONGODB_DEFAULT_ID, 1);
		keys.put(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_URL.getValue(), 1);
		keys.put("logoUrl", 1);
		keys.put("title", 1);
		DBObject query = new BasicDBObject();
		query.put(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_FLAG.getValue(), new BasicDBObject("$eq", 0));
		DBCursor cursor = repo.getCollection(ItaogaoConst.MONGODB_TABLE.MRJJXW.getValue())
				.find(query, keys)
				.addOption(Bytes.QUERYOPTION_NOTIMEOUT);
		try {
			// FIX: the original copied the array element-by-element inside a
			// pointless try/catch; toArray already materializes everything.
			return new ArrayList<DBObject>(cursor.toArray());
		} finally {
			// FIX: close the NOTIMEOUT cursor so it cannot linger server-side.
			cursor.close();
		}
	}

}
