/**
 * 
 */
package com.gxljc.bear.crawler.itaogao.tmtpost;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import com.alibaba.fastjson.JSONObject;
import com.gxljc.commons.util.Pair;
import com.gxljc.commons.util.StringUtil;
import com.gxljc.bear.crawler.base.DolphinCrawlerConf;
import com.gxljc.bear.crawler.base.DolphinCrawlerConsts;
import com.gxljc.bear.crawler.base.DolphinFetchData;
import com.gxljc.bear.crawler.itaogao.ItaogaoConst;
import com.gxljc.bear.crawler.itaogao.NewsPageTable;
import com.gxljc.bear.crawler.itaogao.util.FetchDataUtil;
import com.gxljc.bear.crawler.itaogao.util.HbaseUtil;
import com.gxljc.bear.crawler.itaogao.util.ImageUtil;
import com.gxljc.bear.crawler.itaogao.util.MongodbUtil;
import com.gxljc.bear.crawler.util.DateUtil;
import com.gxljc.bear.crawler.util.HtmlUtil;
import com.gxljc.bear.crawler.util.MailUtil;

/**
 * @author haitao E-mail:haitao@summba.com
 * @version createTime：2021年9月26日 上午11:03:26
 * 
 */

/**
 * Crawler for the tmtpost.com tag listing: walks every listing page, extracts
 * article URLs + logo images, then fetches each article, normalizes it and
 * persists the document to MongoDB (metadata) and HBase (raw HTML).
 */
public class TmtpostCrawler {

	private final static String MONGONAME_COLLECTION = "t_site_tmtpost";
	private final static String URL_HEAD = "http://www.tmtpost.com";
	/** Marker paragraph inside article content; the real body starts after it. */
	private final static String CONTENT_MARK = "点击此处查看英文版";
	private final static Logger LOG = Logger.getLogger(TmtpostCrawler.class);
	/** Media/source display name stored with every document. */
	private final static String SOURCE = "钛媒体-商业价值杂志";
	/** Articles successfully persisted in this JVM (shared across instances, as before). */
	private static int countCrawler = 0;

	// Seed listing pages. NOTE: this used to be a static list filled from the
	// constructor, so constructing a second instance appended a duplicate seed;
	// it is now per-instance.
	private final List<String> crawlerSite = new ArrayList<String>();
	// Document currently being assembled. It is recreated for each article in
	// newArticleMap() so values from a previous article (images, author, ...)
	// can never leak into the next document — the old shared map was never cleared.
	private Map<String, Object> newsMap = new HashMap<String, Object>();
	// One entry per article found on the listing pages: {crawlerUrl, logoUrl}.
	private List<Map<String, Object>> pageEntityInfoList = new ArrayList<Map<String, Object>>();
	/** Whether to fetch through a proxy; subclasses may toggle it. */
	protected boolean proxy;
	// Total number of listing pages, as reported by the first listing response.
	private int pageNum = 1;

	public TmtpostCrawler() {
		crawlerSite.add("http://www.tmtpost.com/tag/299212/1");// start page
		proxy = false;
		newsMap.put("status", 0);
		newsMap.put("mediaName", SOURCE);
	}

	/**
	 * =========Crawler entry point (all pages)==========
	 */
	public void startCrawlerAll() {
		long start = System.currentTimeMillis();
		genDefaultSeeds();
		LOG.info("Start crawler detail TmtpostCrawler news!");
		crawlerNews();
		LOG.info(String.format("TmtpostCrawler news crawler finished! and cralwer all size is %s", countCrawler));
		long timeUsed = System.currentTimeMillis() - start;
		// send the e-mail report
		mailReport(countCrawler, timeUsed);
	}

	// Build the seed list by crawling every listing page of the tag.
	void genDefaultSeeds() {
		for (String seed : crawlerSite) {
			crawlerPageNum(seed);
			// The old code skipped the loop entirely when pageNum == 1, so a
			// single-page tag was never crawled at all; page 1 is always fetched now.
			for (int i = 1; i <= pageNum; i++) {
				String url = URL_HEAD + "/tag/299212/" + String.valueOf(i);
				LOG.info(String.format("crawlering url is %s", url));
				try {
					crawlPageSeed(url);
				} catch (Exception e) {
					LOG.error(String.format("crawler page url error!and url is %s", url), e);
				}
			}
			if (pageEntityInfoList.size() % 500 == 0) {
				LOG.info(String.format("pageEntityInfoList size is %s", pageEntityInfoList.size()));
			}
		}
		LOG.info(String.format("DefaultSeeds crawler finished,all size is %s", pageEntityInfoList.size()));
	}

	/**
	 * Reads the total listing page count from the JSON response of {@code url}
	 * into {@link #pageNum}. The "pageNum" field looks like "x/y"; the last
	 * segment is the total. Leaves pageNum unchanged on any malformed response.
	 */
	void crawlerPageNum(String url) {
		Pair<String, byte[]> pair = FetchDataUtil.fetResult(url, proxy);
		if (pair != null && pair.first != null) {
			try {
				JSONObject jsonResult = (JSONObject) JSONObject.parse(pair.first);
				String pNum = ObjectUtils.toString(jsonResult.get("pageNum"));
				if (!pNum.isEmpty()) {
					String[] pNumSplit = pNum.split("\\/");
					pageNum = Integer.parseInt(pNumSplit[pNumSplit.length - 1]);
				}
			} catch (Exception e) {
				// Malformed JSON or a non-numeric page count: keep the current pageNum.
				LOG.error(String.format("parse pageNum failed! url is %s", url), e);
			}
		}
		LOG.info(String.format("pageNum is %s", pageNum));
	}

	/**
	 * Fetches and persists every article collected in {@link #pageEntityInfoList}.
	 */
	void crawlerNews() {
		if (pageEntityInfoList.size() == 0) {
			LOG.info(String.format("pageEntityInfoList Size is %s", pageEntityInfoList.size()));
			return;
		}
		for (Map<String, Object> map : pageEntityInfoList) {
			String seed = ObjectUtils.toString(map.get("crawlerUrl"));
			// Logo extracted from the listing page (the article JSON has none here).
			String logoUrl = ObjectUtils.toString(map.get("logoUrl"));
			if (seed.isEmpty())
				continue;
			LOG.info(String.format("crawlering seed is %s", seed));
			Pair<String, byte[]> pair = fetchWithRetry(seed);
			if (pair == null) {
				LOG.info(String.format("fetResult failed!seed is %s", seed));
				continue;
			}
			if (pair.first != null) {
				JSONObject jsonResult = (JSONObject) JSONObject.parse(pair.first);
				processArticle(seed, logoUrl, pair.second, jsonResult);
			}
		}
	}

	/**
	 * Fetches and persists a single article identified by {@code seed}. Unlike
	 * {@link #crawlerNews()}, the logo URL is taken from the article JSON itself.
	 */
	void crawlerNewsSingel(String seed) {
		Pair<String, byte[]> pair = fetchWithRetry(seed);
		if (pair == null) {
			LOG.info(String.format("fetResult failed!seed is %s", seed));
			return;
		}
		if (pair.first != null) {
			JSONObject jsonResult = (JSONObject) JSONObject.parse(pair.first);
			String logoUrl = ObjectUtils.toString(jsonResult.get("logoUrl"));
			processArticle(seed, logoUrl, pair.second, jsonResult);
		}
	}

	// Fetches a seed, retrying up to three attempts in total on null results.
	private Pair<String, byte[]> fetchWithRetry(String seed) {
		for (int i = 0; i <= 2; i++) {
			Pair<String, byte[]> pair = FetchDataUtil.fetResult(seed, proxy);
			if (pair != null) {
				return pair;
			}
		}
		return null;
	}

	// Replaces newsMap with a fresh per-article map carrying the constant fields.
	// This fixes the old behavior where one shared map was reused for every article
	// without being cleared, so optional keys (images, author, tagName, abstract,
	// publishDate) from a previous article leaked into later documents.
	private void newArticleMap() {
		newsMap = new HashMap<String, Object>();
		newsMap.put("status", 0);
		newsMap.put("mediaName", SOURCE);
	}

	/**
	 * Shared article pipeline (previously duplicated verbatim in crawlerNews and
	 * crawlerNewsSingel): normalizes the JSON article, writes the document to
	 * MongoDB and the raw HTML to HBase.
	 *
	 * @param seed       the article URL (also used to derive the MD5 _id)
	 * @param logoUrl    logo image URL, possibly empty; relative paths are completed
	 * @param html       raw page bytes stored in HBase
	 * @param jsonResult parsed article JSON
	 */
	@SuppressWarnings("unchecked")
	private void processArticle(String seed, String logoUrl, byte[] html, JSONObject jsonResult) {
		newArticleMap();

		// Plain fields.
		String id = ObjectUtils.toString(StringUtil.genMD5Val(seed));
		String title = ObjectUtils.toString(jsonResult.get("title"));
		String subTitle = ObjectUtils.toString(jsonResult.get("subTitle"));
		String publishDate = ObjectUtils.toString(jsonResult.get("publicTime"));

		// Fields needing normalization.
		putContentToMap(jsonResult.get("content"), publishDate);
		putListToMap(jsonResult.get("author"), "author", true);
		putListToMap(jsonResult.get("tagName"), "tagName", false);
		putListToMap(jsonResult.get("abstract"), "abstract", true);
		if (!logoUrl.isEmpty()) {
			try {
				// Complete a relative image path against the article URL.
				logoUrl = ImageUtil.imageUrlSupply(seed, logoUrl);
				// Convert to the image-hosting platform URL.
				logoUrl = ImageUtil.imageConvertplatform(logoUrl);
			} catch (Exception e) {
				LOG.error(String.format("logoUrl convert failed! logoUrl is %s", logoUrl), e);
			}
		}

		// images are only meaningful when a logoUrl exists.
		Object images = jsonResult.get("images");
		List<String> imagesNew = new ArrayList<String>();
		if (images != null && !logoUrl.isEmpty()) {
			imagesNew = ImageUtil.ImageUrlService((List<String>) images, seed);
		}
		if (imagesNew.size() != 0) {
			newsMap.put("images", imagesNew);
		}

		Date pd = DateUtil.parseFormat(publishDate, "yyyy-MM-dd HH:mm");
		if (pd != null) {
			newsMap.put("publishDate", pd);
		}
		// Plain fields go in last.
		newsMap.put("_id", id);
		newsMap.put("title", title);
		newsMap.put("subTitle", subTitle);
		newsMap.put("logoUrl", logoUrl);
		newsMap.put("crawlUrl", seed);
		newsMap.put("crawlFlag", 1);
		newsMap.put("crawlDate", new Date());
		newsMap.put("source", "");

		// Upsert the document into MongoDB.
		MongodbUtil.update2Mongodb(newsMap, MONGONAME_COLLECTION);

		// Store the raw HTML in HBase under the same id/rowkey.
		NewsPageTable table = new NewsPageTable();
		table.setTitle(title);
		table.setId(id);
		HbaseUtil.saveHbase(table, html);
		countCrawler += 1;
		LOG.info(String.format("insert the id/rowkey is %s ", id));
	}

	/**
	 * Stores a JSON list field into newsMap under {@code putKey}; empty/absent
	 * lists are skipped entirely (no key is written).
	 *
	 * @param joinFlag true to join the elements with "," into one String,
	 *                 false to store the List as-is
	 */
	@SuppressWarnings("unchecked")
	void putListToMap(Object listObject, String putKey, boolean joinFlag) {
		List<String> list = new ArrayList<String>();
		if (listObject != null) {
			list = (List<String>) listObject;
		}
		// Join into a single string, or keep the list, depending on the flag.
		if (joinFlag) {
			if (list.size() != 0) {
				newsMap.put(putKey, StringUtils.join(list, ","));
			}
		} else {
			if (list.size() != 0) {
				newsMap.put(putKey, list);
			}

		}
	}

	/**
	 * Normalizes the content paragraphs into one "####"-joined string in newsMap.
	 * Everything up to and including the English-version marker (or, failing that,
	 * the publish-date paragraph) is header noise and is dropped; full-width
	 * indentation spaces are stripped as well. An empty content list writes nothing.
	 */
	@SuppressWarnings("unchecked")
	void putContentToMap(Object content, String publishDate) {
		List<String> contentList = new ArrayList<String>();
		if (content != null) {
			contentList = (List<String>) content;
		}
		int contentSize = contentList.size();
		if (contentSize != 0) {
			List<String> contentNew = new ArrayList<String>();
			if (contentList.contains(CONTENT_MARK)) {
				contentNew = contentList.subList(contentList.indexOf(CONTENT_MARK) + 1, contentSize);
				newsMap.put("content", StringUtils.join(contentNew, "####").replaceAll("　　####|　　", "").trim());
			} else if (contentList.contains(publishDate)) {
				contentNew = contentList.subList(contentList.indexOf(publishDate) + 1, contentSize);
				newsMap.put("content", StringUtils.join(contentNew, "####").replaceAll("　　####|　　", "").trim());
			} else {
				newsMap.put("content", StringUtils.join(contentList, "####").replaceAll("　　####|　　", "").trim());
			}
		}
	}

	/**
	 * Sends the crawl summary mail, but only on the "G" platform.
	 *
	 * @param count
	 *            number of urls fetched successfully
	 * @param timeUsed
	 *            total elapsed time in milliseconds
	 */
	private void mailReport(int count, long timeUsed) {
		String platformName = DolphinCrawlerConf.getInstance().get(DolphinCrawlerConsts.PARAM_PLATFORM_NAME);
		// Only the "G" platform sends the report (checked before logging so the
		// "sending" line is no longer emitted when nothing is sent).
		if (!"G".equals(platformName)) {
			return;
		}
		LOG.info("sending E-mail");
		String subject = "Hasky News crawler-tmtpost";
		String content = "Result:platform=%s fetching size=%s item(s) from %s with proxy=%s used total time=%s second(s)! ";
		MailUtil.sendMailMoreThanOne(subject,
				String.format(content, platformName, count, MONGONAME_COLLECTION, "" + proxy, "" + timeUsed / 1000),
				"yujunjie@summba.com","durui@summba.com","haitao@summba.com");
	}

	// Extracts every article entry from a listing page's HTML into pageEntityInfoList.
	private int extract(String html) {
		Document doc = Jsoup.parse(html);
		Elements elements = doc.select(".mod-article-list ul li");
		if (CollectionUtils.isEmpty(elements))
			return 0;
		for (Element element : elements) {
			Map<String, Object> pageEntityInfo = extractPage(element);
			pageEntityInfoList.add(pageEntityInfo);
		}
		return DolphinCrawlerConsts.RESULT_YES;
	}

	// Extracts one article's url and logo from a listing item; returns an empty
	// map (not null) when the expected anchor or href is missing.
	private Map<String, Object> extractPage(Element element) {
		Map<String, Object> re = new HashMap<String, Object>();
		Elements elements = element.select(".cont h3 a");
		if (CollectionUtils.isEmpty(elements))
			return new HashMap<>();
		String url = elements.get(0).attr("href");
		if (StringUtils.isEmpty(url))
			return new HashMap<>();
		re.put("crawlerUrl", HtmlUtil.pictureUrlPref(url, ItaogaoConst.TMTPOST_DOMAIN));
		re.put("logoUrl", extractLogoUrl(element));
		return re;
	}

	// Returns the listing item's logo image src, or "" when no image is present.
	private String extractLogoUrl(Element element) {
		Elements elements = element.select(".pic a img");
		if (CollectionUtils.isEmpty(elements))
			return "";
		return elements.get(0).attr("src");
	}

	/**
	 * Downloads one listing page and feeds it through {@link #extract(String)}.
	 *
	 * @return DolphinCrawlerConsts.RESULT_YES on success, RESULT_NO on an empty response
	 */
	public int crawlPageSeed(String url) throws Exception {
		LOG.info("crawl url = " + url);
		DolphinFetchData fetchData = DolphinFetchData
				.getInstance(DolphinCrawlerConsts.CrawlerChannelType.MOVIE.getName());
		byte[] htmlByte = fetchData.getHtml(url, proxy, ItaogaoConst.CRAWL_TIMEOUT);
		if (htmlByte == null) {
			LOG.error("page result is empty url = " + url);
			return DolphinCrawlerConsts.RESULT_NO;
		}
		// Decode explicitly as UTF-8 (the site's encoding); the old code used the
		// platform default charset, which corrupts Chinese text on non-UTF-8 JVMs.
		String html = new String(htmlByte, StandardCharsets.UTF_8);
		if (StringUtils.isEmpty(html)) {
			LOG.error("page result is empty url = " + url);
			return DolphinCrawlerConsts.RESULT_NO;
		}
		extract(html);
		return DolphinCrawlerConsts.RESULT_YES;

	}
}
