package com.chance.cc.crawler.development.command.job.domain.weibo.subscribe;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.command.job.CrawlerJobCommand;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.meta.core.bean.job.CrawlerScheduleJob;
import org.apache.commons.lang3.StringUtils;

import java.util.HashMap;
import java.util.Map;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRecordFilter.key;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime;

/**
 * Publishes a Weibo subscribe-API crawler schedule job (微博api关键词):
 * registers subscription keywords / user ids against Weibo's
 * {@code update_subscribe.json} endpoint via the crawler meta service.
 *
 * @Author Zhao.Hhuan
 * @Date Create in 2021/1/26 16:25
 **/
public class WeiboApiSubscribeKeyCrawlerScheduleJob {

    /** Crawler domain identifier used for queues, filters and tags. */
    private static final String DOMAIN = "weibo";

    /** Weibo open-API application source id, sent as the {@code source} extra. */
    private static final String SOURCE = "1428199813";

    /** Crawler meta-service endpoint the schedule job is published to. */
    private static final String SERVICE_IP = "192.168.1.215";
    private static final int SERVICE_PORT = 9599;

    /** Utility class holding only static factory/publish methods; not instantiable. */
    private WeiboApiSubscribeKeyCrawlerScheduleJob() {
    }

    /**
     * Builds the Weibo subscribe crawler job and publishes it as a one-shot
     * schedule job to the crawler meta service at {@link #SERVICE_IP}:{@link #SERVICE_PORT}.
     *
     * @return the {@link CrawlerJob} that was serialized and published
     */
    public static CrawlerJob publishApiSubscribeDataCrawlerScheduleJobInfo() {
        CrawlerJob weiboJob = publishJob();

        // Publish a single-run schedule job wrapping the crawler job as its JSON payload.
        CrawlerJobCommand crawlerJobCommand = new CrawlerJobCommand(SERVICE_IP, SERVICE_PORT);
        CrawlerScheduleJob crawlerScheduleJob = new CrawlerScheduleJob();
        crawlerScheduleJob.setDomain(DOMAIN);
        crawlerScheduleJob.setCrawlerJob(JSON.toJSONString(weiboJob));
        crawlerScheduleJob.setJobType(CrawlerMetaConstant.ScheduleCrawlerJobType.crawler.enumVal());
        crawlerScheduleJob.setNote("微博订阅关键词或用户");
        crawlerScheduleJob.setCrawlerKey(weiboJob.generateCrawlerKey());
        HttpPage page = crawlerJobCommand.addOrUpdateCrawlerScheduleJob(crawlerScheduleJob);
        System.out.println("发布作业：" + page.getRawText());
        return weiboJob;
    }

    /**
     * Assembles the crawler job definition for updating Weibo API subscriptions.
     *
     * <p>It creates two request records:
     * <ul>
     *   <li>a start-page request against Weibo's {@code update_subscribe.json}
     *       endpoint carrying {@code source}/{@code subid}/{@code type=add_uids}
     *       extras (not downloaded/parsed directly — download and pipeline are
     *       disabled on the record);</li>
     *   <li>a support record that fetches the user-id key list from the internal
     *       meta service and is tagged for internal download.</li>
     * </ul>
     *
     * @return the assembled, not-yet-published {@link CrawlerJob}
     */
    public static CrawlerJob publishJob() {
        String url = "https://c.api.weibo.com/subscribe/update_subscribe.json";
        String site = "updateSubscribe";
        // NOTE(review): subid looks like a placeholder subscription id — confirm before production use.
        String subid = "12332";
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(DOMAIN)
                .httpUrl(url)
                .httpConfig(HttpConfig.me(DOMAIN))
                .filter(key)
                // Redis-backed dedup filter keyed per domain/site.
                .addFilterInfo(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-", "filter", DOMAIN, site, "queue")))
                .needParsed(false)
                .releaseTime(System.currentTimeMillis())
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .build();
        // This record only seeds the subscription update; it is neither downloaded
        // nor pushed through the result pipeline.
        crawlerRequestRecord.setDownload(false);
        crawlerRequestRecord.setSkipPipeline(true);
        crawlerRequestRecord.tagsCreator().bizTags().addDomain(DOMAIN);
        crawlerRequestRecord.tagsCreator().bizTags().addSite(site);
        Map<String, Object> extras = new HashMap<>();
        extras.put("source", SOURCE);
        extras.put("subid", subid);
        extras.put("type", "add_uids");
        crawlerRequestRecord.getHttpRequest().setExtras(extras);

        // Support record: pulls the user-id key list from the internal meta service.
        CrawlerRequestRecord keywordRecord = CrawlerRequestRecord.builder()
                .startPageRequest("weibo_subscribe_userid_add_keyword", turnPageItem)
                .httpUrl("http://192.168.1.215:9599/v1/meta/" + DOMAIN + "/keys?site=userId")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        return CrawlerJob.builder()
                .triggerInfo(
                        DOMAIN,
                        CrawlerMetaConstant.ScheduleJobTrigger_Simple,
                        System.currentTimeMillis(),
                        StringUtils.joinWith("-", site, ScheduleJobTriggerJob_Realtime))
                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(StringUtils.joinWith("-", "crawler", DOMAIN, site, "queue")))
                // Results (if any) land in a local log file; no remote pipeline.
                .fileResultPipeline(null, "/data/chance_crawler_test/data/weibo/weiboSubscribeKey.log", false)
                .requestRecord(crawlerRequestRecord)
                .supportRecord(keywordRecord)
                .build();
    }
}
