package com.gxljc.bear.crawler.weixin;

import com.mongodb.BasicDBObject;
import com.mongodb.Bytes;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import com.gxljc.commons.mongo.cli.BaseMongoRepository;
import com.gxljc.commons.util.Pair;
import com.gxljc.bear.crawler.base.BaseMongoTools;
import com.gxljc.bear.crawler.base.DolphinCrawlerConsts;
import com.gxljc.bear.crawler.util.BeanToMapUtil;
import com.gxljc.bear.crawler.util.SparkUtil;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import java.io.Serializable;
import java.lang.reflect.Field;
import java.util.*;

/**
 * 同步PAGE数据爬虫。
 *
 * @author tanghaitao
 * @since 2022-10-10
 */
public class RsyncPageCrawler implements Serializable {
    // Kept public for backward compatibility with any external users; now final.
    public static final Logger LOG = Logger.getLogger(RsyncPageCrawler.class);

    /** Delegate crawler for the sogou page source; created once, never reassigned. */
    final RsyncSogouPageCrawler crawl;

    public RsyncPageCrawler() {
        crawl = new RsyncSogouPageCrawler();
    }

    /**
     * Endless sync loop: every 20 minutes, sync the three page-source collections
     * (sogou / mobile / newrank) into the unified page table, then run the
     * delegate sogou crawler. A failure in one round is logged and the loop
     * continues with the next round.
     *
     * @throws Exception if interrupted while sleeping (InterruptedException
     *                   propagates to the caller, terminating the loop)
     */
    public void rsync() throws Exception {
        final long SLEEP_TIME = 20 * 60 * 1000;
        while (true) {
            try {
                rsyncSeed(WeixinConst.MONGODB_SOGOU_PAGE);
                rsyncSeed(WeixinConst.WEIXIN_MOBILE_PAGE);
                rsyncSeed(WeixinConst.MONGODB_NEWRANK_PAGE);
                crawl.rsync();
            } catch (Exception er) {
                LOG.error("rsync round failed", er);
            }
            LOG.info("sleep 20 min");
            Thread.sleep(SLEEP_TIME); //20分钟检查一次 (check every 20 minutes)
        }
    }

    /**
     * Fetch the pending seed ids of one collection and dispatch them either to
     * the single-threaded path (small batches) or to a Spark job (large batches).
     *
     * @param tableName source Mongo collection name
     */
    private void rsyncSeed(final String tableName) throws Exception {
        List<String> seeds = getSeed(tableName);
        if (CollectionUtils.isEmpty(seeds)) {
            return;
        }
        int size = seeds.size();
        LOG.info(tableName + " page seeds size = " + size);
        if (size < WeixinConst.MAX_SINGLE_CORE) { //少于一定数量时，使用单线程爬取
            try {
                rsyncNoSpark(seeds, tableName);
            } catch (Exception er) {
                LOG.error("single core job error", er);
            }
        } else {
            try {
                rsync(seeds, tableName);
            } catch (Exception er) {
                LOG.error("spark job error", er);
            }
        }
    }

    /**
     * Sync a large seed batch via a Spark job. Each partition syncs its seeds
     * sequentially and emits its own success counter; the counters are collected
     * and summed so the final log line reports the true number of successfully
     * synced seeds (the old {@code count()} reported the number of partitions).
     *
     * @param seeds     ids to sync
     * @param tableName source Mongo collection name (serialized into the closure)
     */
    public void rsync(List<String> seeds, final String tableName) throws Exception {
        int coreMax = 15;
        JavaSparkContext jsc = SparkUtil.createCommonsSparkContext(
                "bear-微信文章同步-" + seeds.size(), coreMax,
                coreMax * 2, RsyncPageCrawler.class);
        try {
            JavaRDD<String> seedsRDD = jsc.parallelize(new ArrayList<String>(seeds));
            List<Integer> partCounts = seedsRDD.mapPartitions(
                    new FlatMapFunction<Iterator<String>, Integer>() {
                        private static final long serialVersionUID = 1L;

                        @Override
                        public Iterable<Integer> call(Iterator<String> seedIt)
                                throws Exception {
                            int successCnt = 0;
                            while (seedIt.hasNext()) {
                                try {
                                    String seed = seedIt.next();
                                    if (rsync(seed, tableName) == DolphinCrawlerConsts.RESULT_YES) {
                                        successCnt++;
                                    }
                                    // Periodic progress log (every 100 successes).
                                    if (successCnt % 100 == 0) {
                                        LOG.info("successCnt=" + successCnt);
                                    }
                                } catch (Exception er) {
                                    LOG.error("rsync seed error", er);
                                }
                            }
                            // One counter per partition; summed by the driver.
                            return Arrays.asList(successCnt);
                        }
                    }).collect();
            long count = 0;
            for (Integer c : partCounts) {
                count += c;
            }
            LOG.info("all count=" + count);
        } finally {
            // Always release the SparkContext, even when the job throws.
            jsc.stop();
        }
    }

    /**
     * Sync a small seed batch sequentially in the current thread.
     * Per-seed failures are logged and do not abort the batch.
     *
     * @param seeds     ids to sync
     * @param tableName source Mongo collection name
     */
    public void rsyncNoSpark(List<String> seeds, final String tableName) {
        if (CollectionUtils.isEmpty(seeds)) {
            LOG.error("page is empty");
            return;
        }
        for (String id : seeds) {
            try {
                rsync(id, tableName);
            } catch (Exception e) {
                LOG.error("rsync seed " + id + " error", e);
            }
        }
    }

    /**
     * Sync one record: load it from the source collection, apply the
     * source-specific orgUrl rules, upsert it into the unified page table,
     * and mark the source record as synced.
     *
     * @param id        Mongo _id in the source collection
     * @param tableName source collection name
     * @return {@link DolphinCrawlerConsts#RESULT_YES} on success,
     *         {@link DolphinCrawlerConsts#RESULT_NO} when the record is missing
     */
    public int rsync(String id, final String tableName) throws Exception {
        LOG.info("crawl = " + tableName + ", id=" + id);
        PageTable table = getData(id, tableName);
        if (table == null) return DolphinCrawlerConsts.RESULT_NO;
        if (tableName.equals(WeixinConst.MONGODB_SOGOU_PAGE)) {
            //sogou：判断page表是否存在记录，存在即不修改orgUrl值
            //不存在page时，判断是否存在orgUrl,不存在，即把crawlUrl写入orgUrl
            // sogou: only backfill orgUrl from crawlUrl when the unified page
            // record does not exist yet and orgUrl is empty.
            PageTable pageTable = getData(table.wid, WeixinConst.MONGODB_TABLE_PAGE);
            if (pageTable == null && StringUtils.isEmpty(table.orgUrl)) {
                table.setOrgUrl(table.crawlUrl);
            }
        }
        if (tableName.equals(WeixinConst.MONGODB_NEWRANK_PAGE) || tableName.equals(WeixinConst.WEIXIN_MOBILE_PAGE)) {
            //newrank时，更新sogou相关的orgUrl字段 (propagate orgUrl back to the sogou record)
            updateSogouOrgUrlMongodb(table.wid, table.orgUrl);
        }
        saveMongodbPage(table);
        updateRsyncMongodb(id, tableName);
        return DolphinCrawlerConsts.RESULT_YES;
    }

    /**
     * Update (no upsert) the orgUrl of the sogou record matching {@code wid}.
     * Failures are logged and swallowed — this write is best-effort.
     */
    public static void updateSogouOrgUrlMongodb(String wid, String orgUrl) throws Exception {
        Query query = new Query();
        query.addCriteria(Criteria.where("wid").is(wid));
        Update update = new Update();
        update.set(WeixinConst.SCHEMA_COLUMN_NAME.ORG_URL.getValue(), orgUrl);
        BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.WEIXIN.getName());
        try {
            repo.update(query, update, WeixinConst.MONGODB_SOGOU_PAGE, false);
        } catch (Exception er) {
            LOG.error("update sogou orgUrl error, wid=" + wid, er);
        }
    }

    /**
     * Mark the source record {@code id} as synced (rsync flag = 1) so
     * {@link #getSeed(String)} skips it on the next round.
     */
    public static void updateRsyncMongodb(String id, String tableName) throws Exception {
        Query query = new Query();
        query.addCriteria(Criteria.where(DolphinCrawlerConsts.MONGODB_DEFAULT_ID).is(id));
        Update update = new Update();
        update.set(WeixinConst.SCHEMA_COLUMN_NAME.RSYNC.getValue(), 1);
        BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.WEIXIN.getName());
        repo.upsert(query, update, tableName);
    }

    /**
     * List the _ids of records that are crawled (crawlFlag == 1), not yet synced
     * (rsync != 1) and have a wid. Only the _id field is projected.
     *
     * @param tableName source collection name
     * @return seed ids, possibly empty, never null
     */
    public List<String> getSeed(String tableName) throws Exception {
        BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.WEIXIN.getName());
        BasicDBObject keys = new BasicDBObject();
        keys.put(DolphinCrawlerConsts.MONGODB_DEFAULT_ID, 1);
        DBObject query = new BasicDBObject();
        query.put(WeixinConst.SCHEMA_COLUMN_NAME.CRAWL_FLAG.getValue(), new BasicDBObject("$eq", 1));
        query.put(WeixinConst.SCHEMA_COLUMN_NAME.RSYNC.getValue(), new BasicDBObject("$ne", 1));
        query.put(WeixinConst.SCHEMA_COLUMN_NAME.WID.getValue(), new BasicDBObject("$ne", null));
        DBCursor cursor = repo.getCollection(tableName)
                .find(query, keys)
                .addOption(Bytes.QUERYOPTION_NOTIMEOUT);
        List<String> seeds = new LinkedList<>();
        try {
            for (DBObject data : cursor) {
                Object id = data.get(DolphinCrawlerConsts.MONGODB_DEFAULT_ID);
                if (id != null) {
                    seeds.add(id.toString());
                }
            }
        } finally {
            // NOTIMEOUT cursors are never reaped server-side — must close explicitly.
            cursor.close();
        }
        return seeds;
    }

    /**
     * Load one record by _id and convert it to a {@link PageTable} bean.
     *
     * @return the first matching record, or null when absent or unconvertible
     */
    public PageTable getData(String id, String tableName) throws Exception {
        BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.WEIXIN.getName());
        DBObject query = new BasicDBObject();
        query.put(DolphinCrawlerConsts.MONGODB_DEFAULT_ID, new BasicDBObject("$eq", id));
        DBCursor cursor = repo.getCollection(tableName)
                .find(query)
                .addOption(Bytes.QUERYOPTION_NOTIMEOUT);
        try {
            for (DBObject data : cursor) {
                try {
                    return (PageTable) BeanToMapUtil.convertMap(PageTable.class, data.toMap());
                } catch (Exception er) {
                    LOG.error("convert page " + id + " error", er);
                }
            }
        } finally {
            // NOTIMEOUT cursors are never reaped server-side — must close explicitly.
            cursor.close();
        }
        return null;
    }

    /**
     * Upsert the bean into the unified page collection, keyed by wid.
     * Each non-null, non-empty instance field becomes one $set in the update,
     * so existing document fields are only overwritten by real values.
     */
    private void saveMongodbPage(PageTable table) throws Exception {
        if (table == null) return;
        table.set_id(table.wid);
        table.setPageId(table.wid);
        table.setStatus(0);
        Query query = new Query();
        query.addCriteria(Criteria.where(DolphinCrawlerConsts.MONGODB_DEFAULT_ID).is(table.pageId));
        Update update = new Update();
        for (Field field : table.getClass().getDeclaredFields()) {
            // Statics (e.g. a serialVersionUID) are class metadata, not document fields.
            if (java.lang.reflect.Modifier.isStatic(field.getModifiers())) continue;
            // getDeclaredFields() also returns non-public fields; without this,
            // Field.get would throw IllegalAccessException on them.
            field.setAccessible(true);
            Object value = field.get(table);
            if (value == null) continue;
            if (value instanceof List && CollectionUtils.isEmpty((List) value)) continue;
            if (value instanceof String && StringUtils.isEmpty((String) value)) continue;
            update.set(field.getName(), value);
        }
        BaseMongoRepository repo = BaseMongoTools.getInstance(DolphinCrawlerConsts.MongoDBName.WEIXIN.getName());
        repo.upsert(query, update, WeixinConst.MONGODB_TABLE_PAGE);
    }
}
