package com.gxljc.bear.crawler.itaogao.oeeee;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.gxljc.commons.mongo.cli.BaseMongoRepository;
import com.gxljc.bear.crawler.base.DolphinCrawlerConsts;
import com.gxljc.bear.crawler.base.DolphinFetchData;
import com.gxljc.bear.crawler.itaogao.ItaogaoConst;
import com.gxljc.bear.crawler.itaogao.NewsPageTable;
import com.gxljc.bear.crawler.itaogao.NewsPageTemplate;
import com.gxljc.bear.crawler.itaogao.util.MongodbUtil;
import com.gxljc.bear.crawler.util.DateUtil;
import com.gxljc.bear.crawler.util.HtmlUtil;
import com.gxljc.bear.crawler.util.MD5Util;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import java.io.Serializable;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Seed crawler for the oeeee (Nanfang Daily, 南方日报) e-paper.
 *
 * <p>Builds one seed URL per day from today back to January 1st, resolves each
 * date's meta-refresh redirect page to the real {@code node_*.htm} index URL,
 * parses the fetched JSON template into {@link NewsPageTable} rows and stores
 * them in the OEEEE_SEED MongoDB collection.
 *
 * @author tanghaitao
 * @since 2022-9-7
 */
public class OeeeeSeedTagCrawler implements Serializable {
    private static final long serialVersionUID = 1L;

    public static Logger LOG = Logger.getLogger(OeeeeSeedTagCrawler.class);

    /** Gson is thread-safe; share one instance instead of allocating per extract() call. */
    private static final Gson GSON = new Gson();

    /**
     * Matches the meta-refresh redirect target, e.g. "URL=node_123.htm".
     * The dot is escaped — the original pattern's bare '.' matched any character.
     */
    private static final Pattern NODE_URL_PATTERN = Pattern.compile("URL=(node_\\d+\\.htm)");

    // Whether DolphinFetchData requests should go through a proxy.
    private Boolean proxy = false;

    public OeeeeSeedTagCrawler(Boolean proxy) {
        this.proxy = proxy;
    }

    /**
     * Entry point: crawls every daily seed URL from today back to January 1st.
     *
     * @throws Exception if seed construction fails
     */
    public void crawl() throws Exception {
        List<String> seeds = getSeed();
        if (CollectionUtils.isEmpty(seeds)) {
            LOG.error("page is empty");
            return;
        }
        crawlNoSpark(seeds);
    }

    /**
     * Sequential (non-Spark) crawl: processes one URL at a time. A failure on
     * one seed is logged (with stack trace) and does not abort the rest.
     *
     * @param seeds the seed URLs to crawl
     */
    public void crawlNoSpark(List<String> seeds) {
        for (String url : seeds) {
            try {
                crawl(url);
            } catch (Exception e) {
                // Log instead of printStackTrace() so the failure (and its cause)
                // lands in the crawler log, then continue with the remaining seeds.
                LOG.error("crawl seed failed url = " + url, e);
            }
        }
    }

    /**
     * Crawls a single seed URL: resolves the real node page, fetches its body,
     * extracts the article page URLs and saves them to the OEEEE_SEED collection.
     *
     * @param url the daily seed URL
     * @return {@code DolphinCrawlerConsts.RESULT_YES} on success,
     *         {@code DolphinCrawlerConsts.RESULT_NO} when the fetched body is empty
     * @throws Exception when the redirect page cannot be fetched or parsed
     */
    public int crawl(String url) throws Exception {
        LOG.info("crawl url = " + url);
        String trueUrl = parseTrueUrl(url);
        DolphinFetchData fetchData = DolphinFetchData.getInstance(DolphinCrawlerConsts.CrawlerChannelType.bear.getName());
        String result = fetchData.getResult(trueUrl, proxy, ItaogaoConst.CRAWL_TIMEOUT);
        if (StringUtils.isEmpty(result)) {
            LOG.error("page result is empty url = " + trueUrl);
            return DolphinCrawlerConsts.RESULT_NO;
        }
        // Was System.out.println(result) — keep raw page bodies out of stdout.
        LOG.debug(result);
        List<NewsPageTable> tables = extract(result, trueUrl);
        MongodbUtil.saveMongodb(tables, ItaogaoConst.MONGODB_TABLE.OEEEE_SEED.getValue());
        return DolphinCrawlerConsts.RESULT_YES;
    }

    /**
     * Fetches the seed URL and resolves the meta-refresh redirect target
     * ("URL=node_NNN.htm") into an absolute URL.
     *
     * @throws Exception when the page is empty or no redirect target is found
     */
    private String parseTrueUrl(String url) throws Exception {
        DolphinFetchData fetchData = DolphinFetchData.getInstance(DolphinCrawlerConsts.CrawlerChannelType.bear.getName());
        String result = fetchData.getEncodeHtml(url, proxy, ItaogaoConst.CRAWL_TIMEOUT);
        if (StringUtils.isEmpty(result)) {
            throw new Exception("page result is empty url = " + url);
        }
        Matcher m = NODE_URL_PATTERN.matcher(result);
        if (m.find()) {
            String partUrl = m.group(1);
            return HtmlUtil.pictureUrlPref(partUrl, url);
        }
        throw new Exception("no parse url = " + url);
    }

    /**
     * Deserializes the fetched JSON into a {@link NewsPageTemplate} and converts
     * it to table rows. Returns an empty list (never {@code null}) when the JSON
     * deserializes to nothing, so downstream saveMongodb never sees null.
     */
    private List<NewsPageTable> extract(String result, String url) throws Exception {
        NewsPageTemplate template = GSON.fromJson(result, new TypeToken<NewsPageTemplate>() {
        }.getType());
        if (template == null) return Collections.emptyList();
        return parseObject(template, url);
    }

    /**
     * Converts the template's relative page URLs into {@link NewsPageTable} rows
     * (absolute URL, MD5 id, crawlFlag=0). Returns an empty list when the
     * template carries no page URLs.
     */
    private List<NewsPageTable> parseObject(NewsPageTemplate template, String url) throws Exception {
        List<String> crawlUrls = template.getPageUrl();
        if (CollectionUtils.isEmpty(crawlUrls)) return Collections.emptyList();
        List<NewsPageTable> tables = new ArrayList<>(crawlUrls.size());
        for (String crawlUrl : crawlUrls) {
            NewsPageTable table = new NewsPageTable();
            crawlUrl = HtmlUtil.pictureUrlPref(crawlUrl, url);
            table.setId(genId(crawlUrl));
            table.setCrawlFlag(0);
            table.setCrawlUrl(crawlUrl);
            tables.add(table);
        }
        return tables;
    }

    // Generates a stable row id from the crawl URL (MD5 hash).
    private String genId(String href) {
        return MD5Util.getMd5(href);
    }

    /**
     * Builds one seed URL per date (formatted "yyyy-MM/dd") from today back to
     * January 1st, using the OEEEE URL template.
     *
     * @return the seed URLs, never {@code null}
     * @throws Exception if URL construction fails
     */
    public List<String> getSeed() throws Exception {
        List<Date> dateSeed = getDateSeed();
        if (CollectionUtils.isEmpty(dateSeed)) return Collections.emptyList();
        List<String> urls = new ArrayList<>(dateSeed.size());
        for (Date date : dateSeed) {
            String dateFormat = DateUtil.dateFormat(date, "yyyy-MM/dd");
            String url = String.format(ItaogaoConst.NEWSPAPER_SEED.OEEEE.getValue(), dateFormat);
            urls.add(url);
        }
        return urls;
    }

    /**
     * Returns the dates from today back to January 1st of the current year,
     * inclusive, newest first.
     *
     * <p>Legacy {@code Date}/{@code Calendar} kept because {@code DateUtil.dateFormat}
     * consumes {@code java.util.Date}.
     *
     * <p>Fix: the termination check now runs on the date just added. The original
     * decremented first and only then tested for Jan 1, so a run on January 1st
     * looped back through the entire previous year.
     */
    private List<Date> getDateSeed() {
        Calendar now = Calendar.getInstance();
        List<Date> list = new ArrayList<>();
        while (true) {
            list.add(now.getTime());
            if (now.get(Calendar.MONTH) == Calendar.JANUARY && now.get(Calendar.DAY_OF_MONTH) == 1) {
                return list;
            }
            now.add(Calendar.DATE, -1);
        }
    }
}
