package com.gxljc.bear.crawler.itaogao.tmtpost;

import com.gxljc.bear.crawler.base.BaseMongoTools;
import com.gxljc.bear.crawler.base.DolphinCrawlerConf;
import com.gxljc.bear.crawler.base.DolphinCrawlerConsts;
import com.gxljc.bear.crawler.base.DolphinFetchData;
import com.gxljc.bear.crawler.itaogao.ItaogaoConst;
import com.gxljc.bear.crawler.util.SparkUtil;
import com.gxljc.commons.mongo.cli.BaseMongoRepository;
import com.mongodb.BasicDBObject;
import com.mongodb.Bytes;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.nio.charset.StandardCharsets;
import java.util.*;

/**
 * 钛媒体PAGE数据爬虫。
 *
 * @author tanghaitao
 * @since 2022-8-5
 */
public class TmtpostPageCrawler implements Serializable {
	public static Logger LOG = Logger.getLogger(TmtpostPageCrawler.class);
	private Boolean proxy = false;

	public TmtpostPageCrawler(Boolean proxy) {
		this.proxy = proxy;
	}

	public void crawl() throws Exception {
		List<TmtpostPageTable>  seeds =  getSeed();
		if(CollectionUtils.isEmpty(seeds)){
			LOG.error("page is empty");
			return ;
		}
		crawl(seeds);
	}

	public void crawlNoSpark(List<TmtpostPageTable>  seeds)  {
		if(CollectionUtils.isEmpty(seeds)){
			LOG.error("page is empty");
			return ;
		}
		for (TmtpostPageTable table : seeds){
			try {
				crawl(table);
			} catch (Exception e) {
				e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
			}
		}
	}

	public void crawl(List<TmtpostPageTable> seeds) {
		int coreMax = 15;
		JavaSparkContext jsc = SparkUtil.createCommonsSparkContext(
				"dolphin-钛媒体page详情爬取-" + seeds.size(), coreMax,
				coreMax * 2, TmtpostPageCrawler.class);
		JavaRDD<TmtpostPageTable> seedsRDD = jsc.parallelize(new ArrayList<TmtpostPageTable>(seeds));
		long count = seedsRDD.mapPartitions(
				new FlatMapFunction<Iterator<TmtpostPageTable>, Integer>() {
					private static final long serialVersionUID = 1L;

					@Override
					public Iterable<Integer> call(Iterator<TmtpostPageTable> seeds)
							throws Exception {
						int successCnt = 0;
						while (seeds.hasNext()) {
							try {
								TmtpostPageTable seed = seeds.next();
								int ret = crawl(seed);
								if (ret == 1)
									successCnt++;
								if (successCnt % 100 == 0) {
									LOG.info("successCnt=" + successCnt);
								}
							} catch (Exception er) {
								er.printStackTrace();
							}
						}
						return Arrays.asList(successCnt);
					}
				}).count();
		LOG.info("all count=" + count);
		jsc.stop();
	}

	//单条url入口
	public int crawl(TmtpostPageTable table) throws Exception {
		LOG.info("crawl = " + table.crawlUrl);
		String url = table.getCrawlUrl();
		DolphinFetchData fetchData = DolphinFetchData.getInstance(DolphinCrawlerConsts.CrawlerChannelType.MOVIE.getName());
		byte[] htmlByte = fetchData.getHtml(url, proxy, ItaogaoConst.CRAWL_TIMEOUT);
		if (htmlByte == null) {
			LOG.error("page result is empty url = " + url);
			return DolphinCrawlerConsts.RESULT_NO;
		}
		String html = new String(htmlByte);
		if (StringUtils.isEmpty(html)) {
			LOG.error("page result is empty url = " + url);
			return DolphinCrawlerConsts.RESULT_NO;
		}
		extract(table, html);
		String content = table.getContent();
		if(StringUtils.isEmpty(content)) return DolphinCrawlerConsts.RESULT_NO;
		saveHbase(table, htmlByte);
		saveMongodbPage(table);
		return DolphinCrawlerConsts.RESULT_YES;
	}

	//内容抽取
	private void extractContent(TmtpostPageTable table, Document doc){
		Elements contentElement = doc.select("article .inner");
		if (CollectionUtils.isEmpty(contentElement)) return;
		String content = contentElement.text();
		if(StringUtils.isEmpty(content)) return ;
		table.setContent(content);
	}

	//抽取object
	private void extract(TmtpostPageTable table, String html){
		Document doc = Jsoup.parse(html);
		extractContent(table, doc);
		table.setCrawlFlag(1);
	}


	public boolean saveHbase(TmtpostPageTable table, byte[] htmlByte) {
		try {
			Configuration conf = DolphinCrawlerConf.getInstance();
			HTable htable = new HTable(conf, ItaogaoConst.HBASE_TABLE_PAGE);
			String rowKey = table.getPageId();
			Put put = new Put(org.apache.hadoop.hbase.util.Bytes.toBytes(rowKey));
			if (htmlByte != null) {
				put.add(org.apache.hadoop.hbase.util.Bytes.toBytes(ItaogaoConst.HBASE_FAMILY_DATA),
						org.apache.hadoop.hbase.util.Bytes.toBytes(ItaogaoConst.SCHEMA_COLUMN_NAME.HTML.getValue()), htmlByte);
			}
			if(StringUtils.isNotEmpty(table.title)){
				put.add(org.apache.hadoop.hbase.util.Bytes.toBytes(ItaogaoConst.HBASE_FAMILY_DATA),
					org.apache.hadoop.hbase.util.Bytes.toBytes(ItaogaoConst.SCHEMA_COLUMN_NAME.TITLE.getValue()),
					org.apache.hadoop.hbase.util.Bytes.toBytes(table.title));
			}
			try {
				htable.put(put);
				return true;
			} catch (Exception e) {
				e.printStackTrace();
				return false;
			}

		} catch (IOException e) {
			e.printStackTrace();
			return false;
		}
	}

	//save data to mongodb
	public void saveMongodbPage(TmtpostPageTable table) throws Exception {
		Query query = new Query();
		query.addCriteria(Criteria.where(DolphinCrawlerConsts.MONGODB_DEFAULT_ID).is(table.pageId));
		Update update = new Update();
		Field[] fields = table.getClass().getDeclaredFields();
		for (int i = 0; i < fields.length; i++) {
			Object value = fields[i].get(table);
			if (value == null) continue;
			if (value instanceof List)
				if (CollectionUtils.isEmpty((List) value)) continue;
			if (value instanceof String)
				if (StringUtils.isEmpty((String) value)) continue;
			update.set(fields[i].getName(), value);
		}
		BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.ITAOGAO.getName());
		repo.upsert(query, update, ItaogaoConst.MONGODB_TMTPOST_TABLE_PAGE);
	}

	//获取seed
	public List<TmtpostPageTable> getSeed() throws Exception {
		BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.ITAOGAO.getName());
		BasicDBObject keys = new BasicDBObject();
		keys.put(DolphinCrawlerConsts.MONGODB_DEFAULT_ID, 1);
		keys.put(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_URL.getValue(), 1);
		keys.put(ItaogaoConst.SCHEMA_COLUMN_NAME.TITLE.getValue(), 1);
		DBObject query = new BasicDBObject();
		query.put(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_FLAG.getValue(), new BasicDBObject("$eq",0));
		DBCursor cursor = repo.getCollection(ItaogaoConst.MONGODB_TMTPOST_TABLE_PAGE)
				.find(query, keys)
				.addOption(Bytes.QUERYOPTION_NOTIMEOUT);
		List<TmtpostPageTable> seeds = new LinkedList<>();
		List<DBObject> dataIterator = cursor.toArray();
		for (DBObject data : dataIterator) {
			try{
				Object id = data.get(DolphinCrawlerConsts.MONGODB_DEFAULT_ID);
				Object url = data.get(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_URL.getValue());
				Object title = data.get(ItaogaoConst.SCHEMA_COLUMN_NAME.TITLE.getValue());
				TmtpostPageTable table = new TmtpostPageTable();
				table.setPageId(id.toString());
				table.setCrawlUrl(url.toString());
				if(title != null)
					table.setTitle(title.toString());
				seeds.add(table);
			}catch (Exception er){
				er.printStackTrace();
			}
		}
		return seeds;
	}
}
