package com.gxljc.bear.crawler.weixin;

import com.gxljc.bear.crawler.base.BaseMongoTools;
import com.gxljc.bear.crawler.base.DolphinCrawlerConsts;
import com.gxljc.bear.crawler.util.BeanToMapUtil;
import com.gxljc.bear.crawler.util.SparkUtil;
import com.gxljc.commons.mongo.cli.BaseMongoRepository;
import com.mongodb.BasicDBObject;
import com.mongodb.Bytes;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.*;

/**
 * Crawler that syncs sogou PAGE records into the main weixin page collection.
 *
 * <p>It selects recently-published sogou page ids that were crawled but still
 * lack an {@code orgUrl}, then upserts a minimal page record for each one —
 * either sequentially (small batches) or via a Spark job (large batches).
 *
 * @author tanghaitao
 * @since 2022-10-10
 */
public class RsyncSogouPageCrawler implements Serializable {
    // Kept public for backward compatibility with any external readers;
    // made final so the logger cannot be reassigned.
    public static final Logger LOG = Logger.getLogger(RsyncSogouPageCrawler.class);

    public RsyncSogouPageCrawler() {
    }

    /**
     * Entry point: syncs every pending seed from the sogou page collection.
     *
     * @throws Exception if the sync job fails fatally
     */
    public void rsync() throws Exception {
        rsyncSogouCrawlUrl(WeixinConst.MONGODB_SOGOU_PAGE);
    }

    /**
     * Loads the pending seed ids and dispatches them to the single-threaded
     * path (small batches) or to a Spark job (large batches).
     *
     * @param tableName source collection name (logging/sync context)
     */
    private void rsyncSogouCrawlUrl(final String tableName) throws Exception {
        List<String> seeds = null;
        try {
            seeds = getSogouCrawlUrlSeed();
        } catch (Exception er) {
            // Seed lookup failure is non-fatal: we simply have nothing to do.
            LOG.error("failed to load seeds for " + tableName, er);
        }
        if (CollectionUtils.isEmpty(seeds)) {
            return;
        }
        int size = seeds.size();
        LOG.info(tableName + " page seeds size = " + size);
        if (size < WeixinConst.MAX_SINGLE_CORE) { // small batch: crawl on a single thread
            try {
                rsyncNoSpark(seeds, tableName);
            } catch (Exception er) {
                LOG.error("single core job error", er);
            }
        } else {
            try {
                rsync(seeds, tableName);
            } catch (Exception er) {
                LOG.error("spark job error", er);
            }
        }
    }

    /**
     * Runs the sync as a Spark job, processing one partition of seeds per task.
     * The Spark context is always stopped, even when the job fails.
     *
     * @param seeds     ids to sync
     * @param tableName source collection name
     */
    public void rsync(List<String> seeds, final String tableName) throws Exception {
        int coreMax = 10;
        JavaSparkContext jsc = SparkUtil.createCommonsSparkContext(
                "bear-sogou微信orgUrl-" + seeds.size(), coreMax,
                coreMax * 2, RsyncSogouPageCrawler.class);
        try {
            JavaRDD<String> seedsRDD = jsc.parallelize(new ArrayList<String>(seeds));
            long count = seedsRDD.mapPartitions(
                    new FlatMapFunction<Iterator<String>, Integer>() {
                        private static final long serialVersionUID = 1L;

                        @Override
                        public Iterable<Integer> call(Iterator<String> seeds)
                                throws Exception {
                            int successCnt = 0;
                            while (seeds.hasNext()) {
                                try {
                                    String seed = seeds.next();
                                    if (rsync(seed, tableName) == DolphinCrawlerConsts.RESULT_YES) {
                                        successCnt++;
                                    }
                                    // Progress log every 100 successes (skip the noisy 0 case).
                                    if (successCnt > 0 && successCnt % 100 == 0) {
                                        LOG.info("successCnt=" + successCnt);
                                    }
                                } catch (Exception er) {
                                    // One bad seed must not abort the whole partition.
                                    LOG.error("seed sync failed", er);
                                }
                            }
                            return Arrays.asList(successCnt);
                        }
                    }).count();
            LOG.info("all count=" + count);
        } finally {
            jsc.stop(); // always release the Spark context, even on failure
        }
    }

    /**
     * Sequential fallback for small seed batches; a failure on one id does not
     * stop the remaining ids.
     *
     * @param seeds     ids to sync
     * @param tableName source collection name
     */
    public void rsyncNoSpark(List<String> seeds, final String tableName) {
        if (CollectionUtils.isEmpty(seeds)) {
            LOG.error("page is empty");
            return;
        }
        for (String id : seeds) {
            try {
                rsync(id, tableName);
            } catch (Exception e) {
                LOG.error("sync failed for id=" + id, e);
            }
        }
    }

    /**
     * Syncs a single record by id.
     *
     * <p>If the main page collection already holds this wid, its orgUrl is left
     * untouched. Otherwise, when orgUrl is still empty, it is seeded from
     * crawlUrl before the upsert.
     *
     * @param id        source record id
     * @param tableName source collection name
     * @return {@code DolphinCrawlerConsts.RESULT_YES} on success,
     *         {@code RESULT_NO} when the source record does not exist
     */
    public int rsync(String id, final String tableName) throws Exception {
        LOG.info("crawl = " + tableName + ", id=" + id);
        PageTable table = getData(id, tableName);
        if (table == null) {
            return DolphinCrawlerConsts.RESULT_NO;
        }
        PageTable pageTable = getData(table.wid, WeixinConst.MONGODB_TABLE_PAGE);
        if (pageTable == null && StringUtils.isEmpty(table.orgUrl)) {
            table.setOrgUrl(table.crawlUrl);
        }
        saveMongodbPage(table);
        return DolphinCrawlerConsts.RESULT_YES;
    }

    /**
     * Loads one record by {@code _id} and converts it into a {@link PageTable}.
     *
     * @param id        value matched against the mongo default id
     * @param tableName collection to query
     * @return the first convertible match, or {@code null} when nothing matches
     *         or conversion fails
     */
    public PageTable getData(String id, String tableName) throws Exception {
        BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.WEIXIN.getName());
        DBObject query = new BasicDBObject();
        query.put(DolphinCrawlerConsts.MONGODB_DEFAULT_ID, new BasicDBObject("$eq", id));
        DBCursor cursor = repo.getCollection(tableName)
                .find(query)
                .addOption(Bytes.QUERYOPTION_NOTIMEOUT);
        try {
            for (DBObject data : cursor.toArray()) {
                try {
                    return (PageTable) BeanToMapUtil.convertMap(PageTable.class, data.toMap());
                } catch (Exception er) {
                    LOG.error("convert failed, id=" + id + ", table=" + tableName, er);
                }
            }
            return null;
        } finally {
            // NOTIMEOUT cursors are never reaped server-side; close explicitly
            // (previously leaked).
            cursor.close();
        }
    }

    /**
     * Upserts a minimal page record (id, pageId, status, orgUrl) into the main
     * page collection, copying only non-null / non-empty instance fields.
     *
     * @param table source record whose wid/orgUrl seed the upsert
     */
    private void saveMongodbPage(PageTable table) throws Exception {
        if (table == null) {
            return;
        }
        PageTable page = new PageTable();
        page.set_id(table.wid);
        page.setPageId(table.wid);
        page.setStatus(0);
        page.setOrgUrl(table.orgUrl);
        Query query = new Query();
        query.addCriteria(Criteria.where(DolphinCrawlerConsts.MONGODB_DEFAULT_ID).is(page.pageId));
        Update update = new Update();
        for (Field field : page.getClass().getDeclaredFields()) {
            // Static/synthetic fields (e.g. serialVersionUID) are not document
            // data and must not be written into the update.
            if (Modifier.isStatic(field.getModifiers()) || field.isSynthetic()) {
                continue;
            }
            field.setAccessible(true); // tolerate non-public PageTable fields
            Object value = field.get(page);
            if (value == null) {
                continue;
            }
            if (value instanceof List && CollectionUtils.isEmpty((List) value)) {
                continue;
            }
            if (value instanceof String && StringUtils.isEmpty((String) value)) {
                continue;
            }
            update.set(field.getName(), value);
        }
        BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.WEIXIN.getName());
        repo.upsert(query, update, WeixinConst.MONGODB_TABLE_PAGE);
    }

    /**
     * Returns the {@code _id}s of sogou page records published within the last
     * two days that were crawled (crawlFlag == 1), carry a wid, and still lack
     * an orgUrl.
     *
     * @return list of seed ids, possibly empty, never {@code null}
     */
    public List<String> getSogouCrawlUrlSeed() throws Exception {
        BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.WEIXIN.getName());
        BasicDBObject keys = new BasicDBObject();
        keys.put(DolphinCrawlerConsts.MONGODB_DEFAULT_ID, 1); // project _id only
        Calendar now = Calendar.getInstance();
        now.add(Calendar.DATE, -2); // two-day lookback window
        Date startDate = now.getTime();
        DBObject query = new BasicDBObject();
        query.put(WeixinConst.SCHEMA_COLUMN_NAME.CRAWL_FLAG.getValue(), new BasicDBObject("$eq", 1));
        query.put(WeixinConst.SCHEMA_COLUMN_NAME.WID.getValue(), new BasicDBObject("$ne", null));
        query.put(WeixinConst.SCHEMA_COLUMN_NAME.ORG_URL.getValue(), new BasicDBObject("$eq", null));
        query.put(WeixinConst.SCHEMA_COLUMN_NAME.PUBLISH_DATE.getValue(), new BasicDBObject("$gte", startDate));
        DBCursor cursor = repo.getCollection(WeixinConst.MONGODB_SOGOU_PAGE)
                .find(query, keys)
                .addOption(Bytes.QUERYOPTION_NOTIMEOUT);
        List<String> seeds = new ArrayList<>();
        try {
            for (DBObject data : cursor.toArray()) {
                Object id = data.get(DolphinCrawlerConsts.MONGODB_DEFAULT_ID);
                if (id != null) {
                    seeds.add(id.toString());
                } else {
                    LOG.warn("record without _id skipped");
                }
            }
        } finally {
            // NOTIMEOUT cursors must be closed explicitly (previously leaked).
            cursor.close();
        }
        return seeds;
    }
}
