package com.chance.cc.crawler.development.bootstrap.weixin.api;

import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.downloader.http.HttpClientDownloader;
import com.chance.cc.crawler.core.downloader.http.HttpClientGenerator;
import com.chance.cc.crawler.core.downloader.proxy.SiteConfigProxyProvider;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;
import org.yaml.snakeyaml.util.UriEncoder;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;

/**
 * Development bootstrap / manual entry point for the WeiXin (WeChat) crawler.
 *
 * <p>{@link #main(String[])} wires up a {@code DevCrawlerController} with an
 * in-memory request queue, console and file result pipelines, and two seed
 * request records: a keyword-search start record ({@link #searchKWRecord()})
 * and a "support" record ({@link #getKwRecord(String)}) that pulls keys from
 * an internal metadata service. {@link #testDealer()} is a manual smoke test
 * against the newrank weixin search API.
 *
 * @author lt
 * @version 1.0
 * @date 2021-03-17 16:19:47
 * @email okprog@sina.com
 */
public class WeiXinApiStart {

    // Identifiers used to tag crawler requests and to build queue/filter names.
    public static final String domain = "weixin";
    public static final String site = "apikw";
    // NOTE(review): kwSite is not referenced anywhere in this file — confirm
    // it is used by external callers before removing.
    public static final String kwSite = "medical_keyword";
    public static final String authorSite = "medical_weixin_uid";

    public static final String siteBiz = "article-trace";

    // KV-tag keys read by the crawler job's schedule tags (see main()).
    public static final String INTERACTION_DATA_DAY_FROM_START_TIME = "interaction_data_day_from_start_time";
    public static final String ARTICLE_DATA_DAY_FROM_NOW_TIME = "article_data_day_from_now_time";
    // NOTE(review): constant name breaks UPPER_SNAKE_CASE and mirrors the tag
    // string's mixed case; kept as-is for compatibility with external callers.
    public static final String IS_OUTPUT_Interaction = "is_output_Interaction";

    /**
     * Builds a development crawler controller for the weixin domain, tags its
     * job to back-fill article data, and starts crawling.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {

        CrawlerRequestRecord searchKWRecord = searchKWRecord();
        // Support record: fetches author uids from the internal meta service.
        CrawlerRequestRecord supportRecord = getKwRecord(authorSite);

        DevCrawlerController devCrawlerController = DevCrawlerController.builder()
                .triggerInfo(domain, domain + "_trigger", System.currentTimeMillis(), domain + "_job")
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(domain)) // in-memory request queue
                .consoleResultPipeline() // echo results to the console (original comment had a stray 't': "控制台输t出")
                // Also persist results to a local Windows path; second arg is
                // presumably an append flag — TODO confirm against the API.
                .fileResultPipeline("D:\\chance\\data\\weixin\\weixin_uid_04-11.json", false)
                .requestRecord(searchKWRecord)  //more job
                .supportRecord(supportRecord)
                .crawlerThreadNum(3)
                .build();
        // Optional schedule toggles kept for manual experiments — enable as needed.
//        devCrawlerController.getCrawlerJob().getScheduleTags().getCategoryTag().addKVTag(IS_OUTPUT_Interaction,true);
//        devCrawlerController.getCrawlerJob().getScheduleTags().getCategoryTag().addKVTag(INTERACTION_DATA_DAY_FROM_START_TIME,1);
        devCrawlerController.getCrawlerJob().getScheduleTags().getCategoryTag().addKVTag(ARTICLE_DATA_DAY_FROM_NOW_TIME,1);
        devCrawlerController.start();
    }

    /**
     * Builds the start-page request record for the weixin keyword search.
     *
     * <p>The record is a non-downloading, pipeline-skipping seed: it only
     * drives scheduling (turn-page items) and carries a key-or-date-range
     * filter plus biz tags (domain/site/siteBiz).
     *
     * @return the configured seed {@code CrawlerRequestRecord}
     */
    public static CrawlerRequestRecord searchKWRecord(){
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, turnPageItem)
                .httpUrl("https://weixin.qq.com/")
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                // Maximum look-back window; 24*30+14 presumably counts days
                // (~24.5 months) — TODO confirm the unit in FilterUtils.
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24*30+14,null))
                // Memory-backed dedup filter keyed "filter-weixin-apikw-queue".
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo(StringUtils.joinWith("-","filter",domain,site,"queue")))
                .needWashed(false)
                .needParsed(false)
                .build();
        requestRecord.setDownload(false);    // seed only — nothing to fetch
        requestRecord.setSkipPipeline(true); // seed produces no output rows
        requestRecord.tagsCreator().bizTags().addDomain(domain);
        requestRecord.tagsCreator().bizTags().addSite(site);
        requestRecord.tagsCreator().bizTags().addSiteBiz(siteBiz);
        return requestRecord;
    }

    /**
     * Builds a support request record that loads keys (e.g. keywords or
     * author uids) for the given meta site from the internal metadata service.
     *
     * @param metaSite meta-service site name, e.g. {@link #authorSite}
     * @return a {@code CrawlerRequestRecord} labeled as an internally
     *         downloaded support source
     */
    public static CrawlerRequestRecord getKwRecord(String metaSite){
        // Keyword/support record fetched from the internal meta endpoint.
        return CrawlerRequestRecord.builder()
                .startPageRequest(StringUtils.joinWith("-",domain, site),turnPageItem)
                .httpUrl("http://192.168.1.215:9599/v1/meta/"+domain+"/keys?site=" + metaSite)
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();
    }


    /**
     * Manual smoke test: POSTs a form-encoded search expression to the newrank
     * weixin data API through the project's {@code HttpClientDownloader}
     * (proxy disabled), prints the raw response, and prints the first result's
     * {@code commentCount}.
     *
     * <p>NOTE(review): the {@code key} header below is a hardcoded credential
     * committed to source control — move it to configuration/environment and
     * rotate it. Requires network access; will NPE if {@code result} is
     * missing or empty in the response.
     */
    @Test
    public void testDealer(){
        HttpConfig httpConfig = HttpConfig.me("test");
        httpConfig.setUseProxy(false); // hit the API directly, no proxy pool

        HttpClientDownloader downloader = new HttpClientDownloader();
        downloader.setClientGenerator(new HttpClientGenerator());
        downloader.setProxyProvider(new SiteConfigProxyProvider());

        String url = "https://api.newrank.cn/api/custom/ipsos/v2/weixin/data/search";
        // Raw query payload: "should" lists the medical-brand keywords to OR
        // together; from/to bound the publish window; page=41 picks one page.
        String body = "expression={\"should\":[\"妙手互联网\",\"微医\",\"因数健康\",\"康付\",\"妙手保险\",\"医渡云\",\"京东健康\",\"妙手医药\",\"腾讯健康\",\"水滴好药付\",\"镁信\",\"水滴筹\",\"妙手医生\",\"微保\",\"阿里健康\",\"思派互联网\",\"零氪\",\"平安好医生\",\"思派药房\",\"惠民保\",\"诺信\",\"思派保险\",\"圆心惠保\",\"妙手药房\",\"丁香医生\",\"药益保\",\"平安健康医疗\",\"平安健康药店\",\"药联健康\",\"平安健康保险\"]}&size=20&from=2021-04-05 00:00:00&to=2021-04-06 00:00:00&page=41";
        HttpRequest httpRequest = new HttpRequest();
        httpRequest.setUrl(url);
        httpRequest.setMethod(HttpConstant.Method.POST);
        httpRequest.addHeader("key","vf94a4bdb7b0e49bba2e5z6jt"); // FIXME: hardcoded API key — externalize and rotate
        // URI-encode the body so the multi-byte keywords survive form transport.
        httpRequest.setRequestBody(HttpRequestBody.custom(UriEncoder.encode(body).getBytes(),"form-data","utf-8"));
        HttpPage httpPage = downloader.download(httpRequest, httpConfig);
        System.out.println(httpPage.getRawText());
        JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
        JSONObject resultObj = jsonObject.getJSONArray("result").getJSONObject(0);
        int commentCount = resultObj.getIntValue("commentCount");
        System.out.println(commentCount);
    }

}
