package com.gxljc.bear.crawler.itaogao.southcn;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.mongodb.BasicDBObject;
import com.mongodb.Bytes;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.gxljc.commons.mongo.cli.BaseMongoRepository;
import com.gxljc.commons.util.Pair;
import com.gxljc.bear.crawler.base.BaseMongoTools;
import com.gxljc.bear.crawler.base.DolphinCrawlerConsts;
import com.gxljc.bear.crawler.base.DolphinFetchData;
import com.gxljc.bear.crawler.image.ImageFresh;
import com.gxljc.bear.crawler.itaogao.ItaogaoConst;
import com.gxljc.bear.crawler.itaogao.NewsPageTable;
import com.gxljc.bear.crawler.itaogao.NewsPageTemplate;
import com.gxljc.bear.crawler.itaogao.util.HbaseUtil;
import com.gxljc.bear.crawler.itaogao.util.HttpUtil;
import com.gxljc.bear.crawler.itaogao.util.MongodbUtil;
import com.gxljc.bear.crawler.proxy.ProxyUtil;
import com.gxljc.bear.crawler.util.DateUtil;
import com.gxljc.bear.crawler.util.HtmlUtil;
import com.gxljc.bear.crawler.util.SparkUtil;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import java.io.Serializable;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * 南方日报 PAGE 数据爬虫。
 *
 * <p>Crawls Southern Daily (southcn) article detail pages: loads un-crawled seed URLs from
 * MongoDB, fetches each page (optionally through a proxy), extracts content / title /
 * publish date / author / images from the fetch result, then persists the record to HBase
 * and MongoDB. Crawling can run either as a Spark job ({@link #crawl(List)}) or
 * sequentially in-process ({@link #crawlNoSpark(List)}).
 *
 * @author tanghaitao
 * @since 2022-9-6
 */
public class SouthcnPageCrawler implements Serializable {
    // Kept public for backward compatibility with any external references.
    public static final Logger LOG = Logger.getLogger(SouthcnPageCrawler.class);

    /** 内容少于多少即放弃 — articles shorter than this are skipped for author extraction. */
    private static final int CONTENT_LESS_NUM = 80;

    // Author-extraction regexes. Compiled once here instead of on every parseAuthor()
    // call (the original recompiled all five patterns per content line).
    private static final Pattern[] AUTHOR_PATTERNS;
    static {
        final String[] parse = {"南方日报讯[ ]?（记者/(.*?)[ ）]", "南方日报记者 (.*?) 实习生"
                , "南方日报记者 (.*?) ", "撰文：南方日报记者 (.*?)$",
                "南方日报里约电[ ]?（特派记者/(.*?)）"};
        AUTHOR_PATTERNS = new Pattern[parse.length];
        for (int i = 0; i < parse.length; i++) {
            AUTHOR_PATTERNS[i] = Pattern.compile(parse[i]);
        }
    }

    // Gson is thread-safe; share one instance instead of allocating per page.
    private static final Gson GSON = new Gson();

    /** Whether fetches go through the proxy pool; fixed at construction. */
    private final Boolean proxy;

    public SouthcnPageCrawler(Boolean proxy) {
        this.proxy = proxy;
    }

    /**
     * Default entry point: loads all un-crawled seeds from MongoDB and crawls them
     * as a Spark job.
     *
     * @throws Exception if seed loading fails
     */
    public void crawl() throws Exception {
        List<NewsPageTable> seeds = getSeed();
        if (CollectionUtils.isEmpty(seeds)) {
            LOG.error("page is empty");
            return;
        }
        crawl(seeds);
    }

    /**
     * Crawls the given seeds sequentially in the current JVM (no Spark job).
     * Failures are logged per seed; one bad page does not abort the batch.
     */
    public void crawlNoSpark(List<NewsPageTable> seeds) {
        if (CollectionUtils.isEmpty(seeds)) {
            LOG.error("page is empty");
            return;
        }
        ProxyUtil.initQueue(proxy);
        for (NewsPageTable table : seeds) {
            try {
                crawl(table);
            } catch (Exception e) {
                // Best-effort batch: log (with cause) and continue with the next seed.
                LOG.error("crawl failed, url=" + table.getCrawlUrl(), e);
            }
        }
    }

    /**
     * Crawls the seeds as a Spark job, one partition per executor slice.
     * The context is always stopped, even when the job throws.
     */
    public void crawl(List<NewsPageTable> seeds) {
        int coreMax = 10;
        JavaSparkContext jsc = SparkUtil.createCommonsSparkContext(
                "bear-南方日报 page详情爬取-" + seeds.size(), coreMax,
                coreMax * 2, SouthcnPageCrawler.class);
        try {
            JavaRDD<NewsPageTable> seedsRDD = jsc.parallelize(new ArrayList<NewsPageTable>(seeds));
            long count = seedsRDD.mapPartitions(
                    new FlatMapFunction<Iterator<NewsPageTable>, Integer>() {
                        private static final long serialVersionUID = 1L;

                        @Override
                        public Iterable<Integer> call(Iterator<NewsPageTable> it)
                                throws Exception {
                            int successCnt = 0;
                            while (it.hasNext()) {
                                NewsPageTable seed = null;
                                try {
                                    seed = it.next();
                                    if (crawl(seed) == DolphinCrawlerConsts.RESULT_YES) {
                                        successCnt++;
                                        // Progress log only when the counter actually reaches a
                                        // multiple of 100 (the original logged "successCnt=0"
                                        // on every iteration before the first success).
                                        if (successCnt % 100 == 0) {
                                            LOG.info("successCnt=" + successCnt);
                                        }
                                    }
                                } catch (Exception er) {
                                    LOG.error("crawl failed"
                                            + (seed == null ? "" : ", url=" + seed.getCrawlUrl()), er);
                                }
                            }
                            return Arrays.asList(successCnt);
                        }
                    }).count();
            LOG.info("all count=" + count);
        } finally {
            jsc.stop();
        }
    }

    //单条url入口
    /**
     * Crawls a single page: fetch → extract → save to HBase and MongoDB.
     *
     * @param table seed holding the URL to crawl; populated in place with extracted fields
     * @return {@code DolphinCrawlerConsts.RESULT_YES} on success,
     *         {@code RESULT_NO} when the fetch returns nothing
     * @throws Exception on extraction or persistence failure
     */
    public int crawl(NewsPageTable table) throws Exception {
        LOG.info("crawl = " + table.crawlUrl);
        String url = table.getCrawlUrl();
        DolphinFetchData fetchData = DolphinFetchData.getInstance(DolphinCrawlerConsts.CrawlerChannelType.bear.getName());
        Pair<String, byte[]> pair = fetchData.getResult2Pair(url, proxy, ItaogaoConst.CRAWL_TIMEOUT);
        if (pair == null) {
            LOG.error("page result is empty url = " + url);
            return DolphinCrawlerConsts.RESULT_NO;
        }
        extract(table, pair.first);
        HbaseUtil.saveHbase(table, pair.second);
        MongodbUtil.saveMongodb(table, ItaogaoConst.MONGODB_TABLE.SOUTHCN.getValue());
        return DolphinCrawlerConsts.RESULT_YES;
    }

    //内容抽取 — joins the template's paragraph list into one string using the
    // project line-break token.
    private void extractContent(NewsPageTable table, NewsPageTemplate template) {
        List<String> contents = template.getContent();
        if (CollectionUtils.isEmpty(contents)) return;
        // Single-threaded use: StringBuilder instead of synchronized StringBuffer.
        StringBuilder sb = new StringBuilder();
        for (String content : contents) {
            if (sb.length() > 0) sb.append(DolphinCrawlerConsts.DEFAULT_LINE_BREAK_WORD);
            content = content.trim();
            // NOTE(review): literal preserved byte-for-byte from the original. If it is an
            // ASCII space this loop is a no-op after trim(); a full-width space (U+3000) was
            // probably intended — confirm before changing.
            while (content.startsWith(" ")) {
                content = content.substring(1);
            }
            sb.append(content);
        }
        table.setContent(sb.toString());
    }

    //发布时间抽取 — parses the template's "yyyy-MM-dd" publish date.
    private void extractPublishDate(NewsPageTable table, NewsPageTemplate template) {
        String publishDate = template.getPublishDate();
        if (StringUtils.isEmpty(publishDate)) return;
        Date date = DateUtil.parseFormat(publishDate, "yyyy-MM-dd");
        table.setPublishDate(date);
    }

    //标题抽取 — first title entry becomes the title; optional sub-title has the
    // leading "——" dash pair stripped.
    private void extractTitle(NewsPageTable table, NewsPageTemplate template) {
        List<String> title = template.getTitle();
        if (CollectionUtils.isEmpty(title)) return;
        table.setTitle(title.get(0));
        String subTitle = template.subTitle;
        if (StringUtils.isEmpty(subTitle)) return;
        subTitle = subTitle.replace("——", "");
        table.setSubTitle(subTitle);
    }

    //作者抽取 — scans content line by line for a reporter by-line; skips short or
    // single-paragraph content (no line-break token present).
    private void extractAuthor(NewsPageTable table) {
        try {
            if (StringUtils.isEmpty(table.content)) return;
            if (table.content.length() < CONTENT_LESS_NUM) return;
            if (table.content.indexOf(DolphinCrawlerConsts.DEFAULT_LINE_BREAK_WORD) < 0) return;
            String[] lines = table.content.split(DolphinCrawlerConsts.DEFAULT_LINE_BREAK_WORD);
            for (String line : lines) {
                String author = parseAuthor(line);
                if (!StringUtils.isEmpty(author)) {
                    table.setAuthor(author);
                    return;
                }
            }
        } catch (Exception er) {
            // Author is optional metadata; log (with cause) and leave it unset.
            LOG.error("extract author error, url=" + table.crawlUrl, er);
        }
    }

    //抽取作者 — returns the first capture group of the first matching by-line
    // pattern, or null when no pattern matches.
    private String parseAuthor(String text) {
        for (Pattern pattern : AUTHOR_PATTERNS) {
            Matcher m = pattern.matcher(text);
            if (m.find()) {
                return m.group(1);
            }
        }
        return null;
    }

    //缩略图抽取 — normalizes each image URL against the page URL, refreshes it via
    // ImageFresh, stores the refreshed list; the first refreshed image becomes the logo.
    private void extractImage(NewsPageTable table, NewsPageTemplate template) throws Exception {
        List<String> images = template.images;
        if (CollectionUtils.isEmpty(images)) return;
        List<String> saveImages = new LinkedList<>();
        ImageFresh fresh = ImageFresh.getInstance();
        for (String image : images) {
            String newUrl = HtmlUtil.pictureUrlPref(image, table.crawlUrl);
            Pair<Integer, String> newPicPair = fresh.freshOne(newUrl);
            String newPic = newPicPair.second;
            if (StringUtils.isEmpty(table.logoUrl)) {    //当缩略图存在即不再设置
                table.setLogoUrl(newPic);
            }
            saveImages.add(newPic);
        }
        if (!CollectionUtils.isEmpty(saveImages)) {
            table.setImages(saveImages);
        }
    }

    //抽取object — deserializes the fetch result JSON into a template and runs all
    // field extractors, then marks the row crawled (status=0, crawlFlag=1).
    private void extract(NewsPageTable table, String result) throws Exception {
        NewsPageTemplate template = GSON.fromJson(result, new TypeToken<NewsPageTemplate>() {
        }.getType());
        extractContent(table, template);
        extractPublishDate(table, template);
        extractImage(table, template);
        HttpUtil.insertImage2Content(table);
        extractAuthor(table);
        extractTitle(table, template);
        table.setStatus(0);
        table.setCrawlFlag(1);
    }

    //获取seed — loads _id + crawlUrl of every row whose crawlFlag != 1.
    /**
     * Loads un-crawled seeds from the SOUTHCN MongoDB collection.
     *
     * @return list of seeds carrying only id and crawl URL (never null, possibly empty)
     * @throws Exception on MongoDB access failure
     */
    public List<NewsPageTable> getSeed() throws Exception {
        BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.ITAOGAO.getName());
        BasicDBObject keys = new BasicDBObject();
        keys.put(DolphinCrawlerConsts.MONGODB_DEFAULT_ID, 1);
        keys.put(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_URL.getValue(), 1);
        DBObject query = new BasicDBObject();
        query.put(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_FLAG.getValue(), new BasicDBObject("$ne", 1));
        List<NewsPageTable> seeds = new LinkedList<>();
        DBCursor cursor = repo.getCollection(ItaogaoConst.MONGODB_TABLE.SOUTHCN.getValue())
                .find(query, keys)
                .addOption(Bytes.QUERYOPTION_NOTIMEOUT);
        // NOTIMEOUT cursors hold server-side resources until explicitly closed, so the
        // original (which never closed the cursor) leaked one per call.
        try {
            for (DBObject data : cursor.toArray()) {
                try {
                    Object id = data.get(DolphinCrawlerConsts.MONGODB_DEFAULT_ID);
                    Object url = data.get(ItaogaoConst.SCHEMA_COLUMN_NAME.CRAWL_URL.getValue());
                    NewsPageTable table = new NewsPageTable();
                    table.setId(id.toString());
                    table.setCrawlUrl(url.toString());
                    seeds.add(table);
                } catch (Exception er) {
                    // Skip malformed rows (e.g. missing id/url) but keep loading the rest.
                    LOG.error("bad seed row, skipped: " + data, er);
                }
            }
        } finally {
            cursor.close();
        }
        return seeds;
    }
}
