package com.gxljc.bear.crawler.itaogao.tmtpost;

import com.gxljc.bear.crawler.base.BaseMongoTools;
import com.gxljc.bear.crawler.base.DolphinCrawlerConsts;
import com.gxljc.bear.crawler.base.DolphinFetchData;
import com.gxljc.bear.crawler.itaogao.ItaogaoConst;
import com.gxljc.bear.crawler.util.SparkUtil;
import com.gxljc.commons.mongo.cli.BaseMongoRepository;
import com.mongodb.BasicDBObject;
import com.mongodb.Bytes;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.nio.charset.StandardCharsets;
import java.util.*;

/**
 * Crawler that extracts the article-count ("page size") shown on TMTPost (钛媒体)
 * tag pages and writes it back to the tag documents in MongoDB. Crawling is
 * distributed across a Spark job, one partition of seeds per executor.
 *
 * @author tanghaitao
 * @since 2022-8-4
 */
public class TmtpostPageSizeCrawler implements Serializable {
	private static final long serialVersionUID = 1L;

	// Kept public and non-final for backward compatibility with any external references.
	public static Logger LOG = Logger.getLogger(TmtpostPageSizeCrawler.class);

	// Emit a progress log line every this many successfully crawled seeds.
	private static final int PROGRESS_LOG_INTERVAL = 100;

	// Whether page fetches should be routed through a proxy.
	private Boolean proxy = false;

	/**
	 * @param proxy whether outgoing HTTP fetches go through a proxy
	 */
	public TmtpostPageSizeCrawler(Boolean proxy) {
		this.proxy = proxy;
	}

	/**
	 * Entry point: loads all tag seeds from MongoDB and crawls each of them.
	 *
	 * @throws Exception if reading the seed collection fails
	 */
	public void crawl() throws Exception {
		List<TmtpostTagTable> seeds = getSeed();
		if (CollectionUtils.isEmpty(seeds)) {
			LOG.error("page is empty");
			return;
		}
		crawl(seeds);
	}

	/**
	 * Distributes the given seeds over a Spark job and crawls them in parallel.
	 * Each partition reports the number of seeds it crawled successfully.
	 *
	 * @param seeds tag records carrying the URLs to fetch
	 */
	public void crawl(List<TmtpostTagTable> seeds) {
		int coreMax = 15;
		JavaSparkContext jsc = SparkUtil.createCommonsSparkContext(
				"dolphin-钛媒体tag page size爬取-" + seeds.size(), coreMax,
				coreMax * 2, TmtpostPageSizeCrawler.class);
		// Stop the context in a finally block so a failed job does not leak it.
		try {
			JavaRDD<TmtpostTagTable> seedsRDD = jsc.parallelize(new ArrayList<TmtpostTagTable>(seeds));
			long count = seedsRDD.mapPartitions(
					new FlatMapFunction<Iterator<TmtpostTagTable>, Integer>() {
						private static final long serialVersionUID = 1L;

						@Override
						public Iterable<Integer> call(Iterator<TmtpostTagTable> partition)
								throws Exception {
							int successCnt = 0;
							while (partition.hasNext()) {
								try {
									TmtpostTagTable seed = partition.next();
									if (crawl(seed) == DolphinCrawlerConsts.RESULT_YES) {
										successCnt++;
										// Log only on each new multiple of the interval; the
										// previous placement logged "successCnt=0" on every
										// iteration until the first 100 successes.
										if (successCnt % PROGRESS_LOG_INTERVAL == 0) {
											LOG.info("successCnt=" + successCnt);
										}
									}
								} catch (Exception er) {
									// Best-effort per seed: log and continue with the rest of
									// the partition instead of printStackTrace().
									LOG.error("crawl seed failed", er);
								}
							}
							return Arrays.asList(successCnt);
						}
					}).count();
			LOG.info("all count=" + count);
		} finally {
			jsc.stop();
		}
	}

	/**
	 * Crawls a single tag URL: fetches the HTML, extracts the page size and
	 * persists the updated record.
	 *
	 * @param table seed record whose {@code crawlUrl} is fetched; its pageSize is set on success
	 * @return {@code DolphinCrawlerConsts.RESULT_YES} on success, {@code RESULT_NO} when the page is empty
	 * @throws Exception if fetching or persisting fails
	 */
	public int crawl(TmtpostTagTable table) throws Exception {
		String url = table.crawlUrl;
		LOG.info("crawl url = " + url);
		DolphinFetchData fetchData = DolphinFetchData.getInstance(DolphinCrawlerConsts.CrawlerChannelType.MOVIE.getName());
		byte[] htmlByte = fetchData.getHtml(url, proxy, ItaogaoConst.CRAWL_TIMEOUT);
		if (htmlByte == null) {
			LOG.error("page result is empty url = " + url);
			return DolphinCrawlerConsts.RESULT_NO;
		}
		// Decode explicitly as UTF-8 (tmtpost.com serves UTF-8) instead of relying
		// on the platform default charset.
		String html = new String(htmlByte, StandardCharsets.UTF_8);
		if (StringUtils.isEmpty(html)) {
			LOG.error("page result is empty url = " + url);
			return DolphinCrawlerConsts.RESULT_NO;
		}
		extract(table, html);
		saveMongodb(table);
		return DolphinCrawlerConsts.RESULT_YES;
	}

	/** Parses the HTML and stores the extracted page size on the record. */
	private void extract(TmtpostTagTable table, String html) {
		table.setPageSize(extractPageSize(html));
	}

	/**
	 * Extracts the article count from the tag page.
	 *
	 * @param html raw page HTML
	 * @return the parsed count, or 0 when the element is missing or not numeric
	 */
	private int extractPageSize(String html) {
		Document doc = Jsoup.parse(html);
		Elements sizeElement = doc.select(".user-article-list h2 .num");
		if (CollectionUtils.isEmpty(sizeElement)) return 0;
		String pageSizeStr = sizeElement.get(0).text().trim();
		try {
			return Integer.parseInt(pageSizeStr);
		} catch (NumberFormatException e) {
			// Scraped text may contain separators or unexpected markup; treat as absent.
			LOG.error("unparseable page size '" + pageSizeStr + "'");
			return 0;
		}
	}

	/**
	 * Upserts the record into MongoDB, copying every non-empty instance field
	 * into the update document via reflection.
	 *
	 * @param table record to persist, keyed by {@code tagId}
	 * @throws Exception if reflective access or the upsert fails
	 */
	public void saveMongodb(TmtpostTagTable table) throws Exception {
		Query query = new Query();
		query.addCriteria(Criteria.where(DolphinCrawlerConsts.MONGODB_DEFAULT_ID).is(table.tagId));
		Update update = new Update();
		for (Field field : table.getClass().getDeclaredFields()) {
			// Skip static (e.g. serialVersionUID) and compiler-generated fields:
			// they are class-level state, not document data.
			if (Modifier.isStatic(field.getModifiers()) || field.isSynthetic()) continue;
			// Tolerate private fields; plain get() would throw IllegalAccessException.
			field.setAccessible(true);
			Object value = field.get(table);
			if (value == null) continue;
			if (value instanceof List && CollectionUtils.isEmpty((List) value)) continue;
			if (value instanceof String && StringUtils.isEmpty((String) value)) continue;
			update.set(field.getName(), value);
		}
		BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.ITAOGAO.getName());
		repo.upsert(query, update, ItaogaoConst.MONGODB_TMTPOST_TABLE_TAG);
	}

	/**
	 * Loads all seeds (tag id + crawl URL) from the tag collection.
	 *
	 * @return the seed list; malformed documents are logged and skipped
	 * @throws Exception if the MongoDB query fails
	 */
	public List<TmtpostTagTable> getSeed() throws Exception {
		List<TmtpostTagTable> seeds = new LinkedList<TmtpostTagTable>();
		BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.ITAOGAO.getName());
		BasicDBObject keys = new BasicDBObject();
		keys.put(DolphinCrawlerConsts.MONGODB_DEFAULT_ID, 1);
		keys.put(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_URL.getValue(), 1);
		DBObject query = new BasicDBObject();
		DBCursor cursor = repo.getCollection(ItaogaoConst.MONGODB_TMTPOST_TABLE_TAG)
				.find(query, keys)
				.addOption(Bytes.QUERYOPTION_NOTIMEOUT);
		try {
			// Stream the cursor rather than cursor.toArray(), which would
			// materialize the entire collection in memory at once.
			while (cursor.hasNext()) {
				DBObject data = cursor.next();
				try {
					Object id = data.get(DolphinCrawlerConsts.MONGODB_DEFAULT_ID);
					Object url = data.get(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_URL.getValue());
					if (id == null || url == null) {
						LOG.error("incomplete seed document: " + data);
						continue;
					}
					TmtpostTagTable table = new TmtpostTagTable();
					table.setTagId(id.toString());
					table.setCrawlUrl(url.toString());
					seeds.add(table);
				} catch (Exception er) {
					LOG.error("bad seed document: " + data, er);
				}
			}
		} finally {
			// Close explicitly: a NOTIMEOUT cursor is never reaped server-side.
			cursor.close();
		}
		return seeds;
	}
}
