package com.gxljc.bear.crawler.itaogao.cbcomcn;

import com.gxljc.commons.mongo.cli.BaseMongoRepository;
import com.gxljc.bear.crawler.base.*;
import com.gxljc.bear.crawler.itaogao.ItaogaoConst;
import com.gxljc.bear.crawler.itaogao.NewsPageTable;
import com.gxljc.bear.crawler.itaogao.util.MongodbUtil;
import com.gxljc.bear.crawler.util.DateUtil;
import com.gxljc.bear.crawler.util.HtmlUtil;
import com.gxljc.bear.crawler.util.MD5Util;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import java.io.Serializable;
import java.net.URLEncoder;
import java.util.*;

/**
 * Seed-list crawler for 中国经营报 (China Business Journal, dianzibao.cb.com.cn).
 * <p>
 * Generates candidate front-page URLs for every Monday between Jan 1 and today
 * (the paper is a weekly), fetches each page, extracts article links from the
 * {@code .ul02_l li a} elements and stores them into MongoDB for the detail
 * crawler to pick up.
 *
 * @author tanghaitao
 * @since 2022-9-8
 */
public class CbcomcnSeedCrawler implements Serializable {
    public static final Logger LOG = Logger.getLogger(CbcomcnSeedCrawler.class);

    /** Whether page fetches should go through a proxy. */
    private final Boolean proxy;
    /** Extra HTTP request headers sent with every fetch (anti-bot cookie + host). */
    private final Map<String, String> proper = new HashMap<String, String>();

    /**
     * @param proxy true to route fetches through the configured proxy
     */
    public CbcomcnSeedCrawler(Boolean proxy) {
        // The site gates access behind a __jsl_clearance-style cookie; obtain a
        // fresh one from the shared cookie holder rather than hard-coding it.
        proper.put("Cookie", CookieAccount.getInstance().getCbcomcnCookie());
        proper.put("Host", "dianzibao.cb.com.cn");
        this.proxy = proxy;
    }

    /**
     * Entry point: builds the seed URL list and crawls every seed sequentially.
     *
     * @throws Exception if seed generation fails
     */
    public void crawl() throws Exception {
        List<String> seeds = getSeed();
        if (CollectionUtils.isEmpty(seeds)) {
            LOG.error("page is empty");
            return;
        }
        crawlNoSpark(seeds);
    }

    /**
     * Crawls the given seeds one by one on the local JVM (no Spark job).
     * A failure on one URL is logged and the remaining seeds are still processed.
     *
     * @param seeds list page URLs to fetch
     */
    public void crawlNoSpark(List<String> seeds) {
        for (String url : seeds) {
            try {
                crawl(url);
            } catch (Exception e) {
                // One bad page must not abort the whole batch; log with the
                // failing URL and the full stack trace and keep going.
                LOG.error("crawl failed, url = " + url, e);
            }
        }
    }

    /**
     * Crawls a single seed URL: fetches the page, extracts article links and
     * persists them to MongoDB.
     *
     * @param url seed (newspaper node page) URL
     * @return {@code DolphinCrawlerConsts.RESULT_YES} on success,
     *         {@code DolphinCrawlerConsts.RESULT_NO} when the page body is empty
     * @throws Exception on fetch or extraction failure
     */
    public int crawl(String url) throws Exception {
        LOG.info("crawl url = " + url);
        String newUrl = genCrawlUrl(url);
        LOG.info("crawl newUrl = " + newUrl);
        DolphinFetchData fetchData = DolphinFetchData.getInstance(DolphinCrawlerConsts.CrawlerChannelType.bear.getName());
        String html = fetchData.getEncodeHtml(newUrl, proxy, ItaogaoConst.CRAWL_TIMEOUT, proper);
        if (StringUtils.isEmpty(html)) {
            LOG.error("page result is empty url = " + url);
            return DolphinCrawlerConsts.RESULT_NO;
        }
        List<NewsPageTable> tables = extract(html, url);
        // Guard the save: extract() yields an empty list for pages without the
        // expected link block, and there is nothing to persist in that case.
        if (CollectionUtils.isNotEmpty(tables)) {
            MongodbUtil.saveMongodb(tables, ItaogaoConst.MONGODB_TABLE.CBCOMCN.getValue());
        }
        return DolphinCrawlerConsts.RESULT_YES;
    }

    /**
     * Wraps the raw URL into the detector-web fetch URL when a detector host is
     * configured; otherwise returns the URL unchanged.
     *
     * @param url raw page URL
     * @return URL to actually request
     * @throws Exception if URL encoding fails
     */
    private String genCrawlUrl(String url) throws Exception {
        Configuration conf = DolphinCrawlerConf.getInstance();
        String webHost = conf.get(DolphinCrawlerConsts.PARAM_DETECTOR_WEB_HOST);
        if (StringUtils.isEmpty(webHost)) return url;
        String newUrl = URLEncoder.encode(url, "utf-8");
        return String.format(webHost + ItaogaoConst.DETECTOR_WEB_PATH, DolphinCrawlerConsts.PARAM_DETECTOR_WEB_ID, DolphinCrawlerConsts.PARAM_DETECTOR_WEB_TYPE, newUrl);
    }

    /**
     * Extracts article links from a fetched list page.
     *
     * @param html page HTML
     * @param url  page URL, used to absolutize relative hrefs
     * @return one {@link NewsPageTable} per link; empty list when the page has
     *         no {@code .ul02_l li a} elements (never {@code null})
     * @throws Exception propagated from URL normalization
     */
    private List<NewsPageTable> extract(String html, String url) throws Exception {
        Document doc = Jsoup.parse(html);
        Elements elements = doc.select(".ul02_l li a");
        if (CollectionUtils.isEmpty(elements)) return Collections.emptyList();
        List<NewsPageTable> tables = new LinkedList<>();
        for (Element element : elements) {
            String crawlUrl = element.attr("href");
            NewsPageTable table = new NewsPageTable();
            // Relative hrefs are resolved against the page URL.
            crawlUrl = HtmlUtil.pictureUrlPref(crawlUrl, url);
            table.setId(genId(crawlUrl));
            table.setCrawlUrl(crawlUrl);
            tables.add(table);
        }
        return tables;
    }

    /**
     * Derives a stable document id from a link (MD5 of the URL), so re-crawls
     * upsert rather than duplicate.
     */
    private String genId(String href) {
        return MD5Util.getMd5(href);
    }

    /**
     * Builds the seed URL list: for every publication date, formats node pages
     * 1..49 with the {@code CBCOMCN} URL template.
     *
     * @return seed URLs; empty list when no dates are available (never {@code null})
     * @throws Exception propagated from date formatting
     */
    public List<String> getSeed() throws Exception {
        List<Date> dateSeed = getDateSeed();
        if (CollectionUtils.isEmpty(dateSeed)) return Collections.emptyList();
        List<String> urls = new LinkedList<String>();
        // Upper bound on node pages per issue; non-existent nodes simply
        // return empty pages downstream.
        final int MAX_NODE = 50;
        for (Date date : dateSeed) {
            String dateFormat = DateUtil.dateFormat(date, "yyyy-MM/dd");
            for (int i = 1; i < MAX_NODE; i++) {
                String url = String.format(ItaogaoConst.NEWSPAPER_SEED.CBCOMCN.getValue(), dateFormat, i + "");
                urls.add(url);
            }
        }
        return urls;
    }

    /**
     * Collects every Monday (the paper's publication day) from today back to
     * January 1 of the current year, walking one day at a time.
     *
     * @return publication dates, newest first
     */
    private List<Date> getDateSeed() {
        Calendar now = Calendar.getInstance();
        List<Date> list = new LinkedList<>();
        while (true) {
            int month = now.get(Calendar.MONTH);
            int day = now.get(Calendar.DAY_OF_MONTH);
            int week = now.get(Calendar.DAY_OF_WEEK);
            if (week == Calendar.MONDAY) {
                list.add(now.getTime());
            }
            // Stop once the walk reaches January 1 (Calendar.MONTH is 0-based).
            if (month == Calendar.JANUARY && day == 1) {
                return list;
            }
            now.add(Calendar.DATE, -1);
        }
    }
}
