package com.gxljc.bear.crawler.itaogao.oeeee;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.mongodb.BasicDBObject;
import com.mongodb.Bytes;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.gxljc.commons.mongo.cli.BaseMongoRepository;
import com.gxljc.bear.crawler.base.BaseMongoTools;
import com.gxljc.bear.crawler.base.DolphinCrawlerConsts;
import com.gxljc.bear.crawler.base.DolphinFetchData;
import com.gxljc.bear.crawler.itaogao.ItaogaoConst;
import com.gxljc.bear.crawler.itaogao.NewsPageTable;
import com.gxljc.bear.crawler.itaogao.NewsPageTemplate;
import com.gxljc.bear.crawler.itaogao.util.MongodbUtil;
import com.gxljc.bear.crawler.proxy.ProxyUtil;
import com.gxljc.bear.crawler.util.HtmlUtil;
import com.gxljc.bear.crawler.util.MD5Util;
import com.gxljc.bear.crawler.util.SparkUtil;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;

import java.io.Serializable;
import java.util.*;

/**
 * Seed data crawler for Southern Metropolis Daily (南方都市报, oeeee.com).
 *
 * @author tanghaitao
 * @since 2022-9-22
 */
public class OeeeeSeedCrawler implements Serializable {
    // Kept public/static for backward compatibility with any external readers; made final.
    public static final Logger LOG = Logger.getLogger(OeeeeSeedCrawler.class);
    // Gson is thread-safe; one shared instance instead of allocating per extract() call.
    private static final Gson GSON = new Gson();
    // Whether HTTP fetches should go through the proxy pool.
    private Boolean proxy = false;

    public OeeeeSeedCrawler(Boolean proxy) {
        this.proxy = proxy;
    }

    /**
     * Entry point: loads un-crawled seeds from MongoDB and crawls them.
     * Small batches run sequentially in this JVM; larger ones are distributed via Spark.
     *
     * @throws Exception if seed loading or proxy-queue initialisation fails
     */
    public void crawl() throws Exception {
        List<NewsPageTable> seeds = getSeed();
        if (CollectionUtils.isEmpty(seeds)) {
            LOG.error("page is empty");
            return;
        }
        ProxyUtil.initQueue(proxy);
        if (seeds.size() < DolphinCrawlerConsts.SINGLE_CORE_TO_RUN_SIZE) {
            crawlNoSpark(seeds);
        } else {
            crawl(seeds);
        }
    }

    /**
     * Crawls seeds sequentially in the current JVM (small batches).
     * A failure on one seed is logged and does not stop the remaining seeds.
     */
    public void crawlNoSpark(List<NewsPageTable> seeds) {
        for (NewsPageTable table : seeds) {
            try {
                crawl(table);
            } catch (Exception e) {
                // FIX: log through the class logger instead of printStackTrace()
                LOG.error("crawl seed failed, url=" + table.crawlUrl, e);
            }
        }
    }

    /**
     * Crawls seeds distributed over a Spark cluster (large batches).
     * Each partition counts its successes; the driver only logs the partition count.
     */
    public void crawl(List<NewsPageTable> seeds) {
        int coreMax = 10;
        JavaSparkContext jsc = SparkUtil.createCommonsSparkContext(
                "bear-南方都市报 种子爬取-" + seeds.size(), coreMax,
                coreMax * 2, OeeeeSeedCrawler.class);
        try {
            JavaRDD<NewsPageTable> seedsRDD = jsc.parallelize(new ArrayList<NewsPageTable>(seeds));
            long count = seedsRDD.mapPartitions(
                    new FlatMapFunction<Iterator<NewsPageTable>, Integer>() {
                        private static final long serialVersionUID = 1L;

                        @Override
                        public Iterable<Integer> call(Iterator<NewsPageTable> seeds)
                                throws Exception {
                            int successCnt = 0;
                            while (seeds.hasNext()) {
                                NewsPageTable seed = seeds.next();
                                try {
                                    // FIX: compare against the named constant the callee
                                    // actually returns, not the magic number 1
                                    if (crawl(seed) == DolphinCrawlerConsts.RESULT_YES) {
                                        successCnt++;
                                        // FIX: original logged "successCnt=0" on every failing
                                        // iteration; only log on actual progress milestones
                                        if (successCnt % 100 == 0) {
                                            LOG.info("successCnt=" + successCnt);
                                        }
                                    }
                                } catch (Exception er) {
                                    // FIX: log instead of printStackTrace()
                                    LOG.error("crawl seed failed, url=" + seed.crawlUrl, er);
                                }
                            }
                            return Arrays.asList(successCnt);
                        }
                    }).count();
            LOG.info("all count=" + count);
        } finally {
            // FIX: release executors even if the Spark job throws
            jsc.stop();
        }
    }

    /**
     * Crawls a single seed URL: fetches the page, extracts child page records,
     * stores them in the OEEEE page collection, then marks the seed as crawled.
     *
     * @param table seed row whose {@code crawlUrl} is fetched
     * @return {@code DolphinCrawlerConsts.RESULT_YES} on success,
     *         {@code RESULT_NO} when the fetch returned an empty body
     * @throws Exception on fetch, parse, or MongoDB errors
     */
    public int crawl(NewsPageTable table) throws Exception {
        String url = table.crawlUrl;
        LOG.info("crawl url = " + url);
        DolphinFetchData fetchData = DolphinFetchData.getInstance(DolphinCrawlerConsts.CrawlerChannelType.bear.getName());
        String result = fetchData.getResult(url, proxy, ItaogaoConst.CRAWL_TIMEOUT);
        if (StringUtils.isEmpty(result)) {
            LOG.error("page result is empty url = " + url);
            return DolphinCrawlerConsts.RESULT_NO;
        }
        List<NewsPageTable> tables = extract(result, url);
        // FIX: extract() may yield no rows; do not hand an empty/missing batch to MongoDB
        if (CollectionUtils.isNotEmpty(tables)) {
            MongodbUtil.saveMongodb(tables, ItaogaoConst.MONGODB_TABLE.OEEEE_PAGE.getValue());
        }
        saveSeed(table);
        return DolphinCrawlerConsts.RESULT_YES;
    }

    // Marks the seed as crawled (crawlFlag=1) and writes it back to the seed collection.
    private void saveSeed(NewsPageTable table) throws Exception {
        table.setCrawlFlag(1);
        MongodbUtil.saveMongodb(table, ItaogaoConst.MONGODB_TABLE.OEEEE_SEED.getValue());
    }

    /**
     * Deserialises the fetched JSON body into a page template and extracts child rows.
     * FIX: returns an empty list (never null) so callers need no null checks.
     */
    private List<NewsPageTable> extract(String result, String url) throws Exception {
        NewsPageTemplate template = GSON.fromJson(result, new TypeToken<NewsPageTemplate>() {
        }.getType());
        if (template == null) return Collections.emptyList();
        return parseObject(template, url);
    }

    /**
     * Builds one NewsPageTable per crawl URL in the template.
     * Relative URLs are normalised against the seed page URL before keying.
     */
    private List<NewsPageTable> parseObject(NewsPageTemplate template, String url) throws Exception {
        List<String> crawlUrls = template.getCrawlUrl();
        // FIX: empty list instead of null on a template with no URLs
        if (CollectionUtils.isEmpty(crawlUrls)) return Collections.emptyList();
        List<NewsPageTable> tables = new ArrayList<>(crawlUrls.size());
        for (String crawlUrl : crawlUrls) {
            // Resolve relative links against the seed page URL
            crawlUrl = HtmlUtil.pictureUrlPref(crawlUrl, url);
            NewsPageTable table = new NewsPageTable();
            table.setId(genId(crawlUrl));
            table.setCrawlUrl(crawlUrl);
            tables.add(table);
        }
        return tables;
    }

    // Derives a stable document id from the URL (MD5 digest).
    private String genId(String href) {
        return MD5Util.getMd5(href);
    }

    /**
     * Loads all seeds with crawlFlag == 0 from the OEEEE seed collection,
     * projecting only the document id and crawl URL columns.
     * Malformed rows are logged and skipped rather than aborting the load.
     */
    public List<NewsPageTable> getSeed() throws Exception {
        BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.ITAOGAO.getName());
        BasicDBObject keys = new BasicDBObject();
        keys.put(DolphinCrawlerConsts.MONGODB_DEFAULT_ID, 1);
        keys.put(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_URL.getValue(), 1);
        DBObject query = new BasicDBObject();
        query.put(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_FLAG.getValue(), new BasicDBObject("$eq", 0));
        List<NewsPageTable> seeds = new LinkedList<>();
        DBCursor cursor = repo.getCollection(ItaogaoConst.MONGODB_TABLE.OEEEE_SEED.getValue())
                .find(query, keys)
                .addOption(Bytes.QUERYOPTION_NOTIMEOUT);
        try {
            for (DBObject data : cursor.toArray()) {
                try {
                    Object id = data.get(DolphinCrawlerConsts.MONGODB_DEFAULT_ID);
                    Object url = data.get(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_URL.getValue());
                    NewsPageTable table = new NewsPageTable();
                    table.setId(id.toString());
                    table.setCrawlUrl(url.toString());
                    seeds.add(table);
                } catch (Exception er) {
                    // FIX: log and skip malformed rows instead of printStackTrace()
                    LOG.error("skip malformed seed row: " + data, er);
                }
            }
        } finally {
            // FIX: NOTIMEOUT cursors are never reaped by the server; close explicitly
            cursor.close();
        }
        return seeds;
    }
}
