package com.chance.cc.crawler.development.bootstrap.weibo.weiboapi;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.controller.DevCrawlerController;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.DigestUtils;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRecordFilter.dateRange;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;

/**
 * Dev bootstrap for crawling the Weibo commercial API.
 *
 * <p>Builds {@link CrawlerRequestRecord}s for articles, interaction counts,
 * replies, and author info; the results are distinguished downstream via the
 * {@code site_biz} tag.
 *
 * @author songding
 * @since 2021/11/25
 */
public class WeiboApiNewStart {
    private static final String domain = "weibo";
    private static final String site = "wbApi";
    private static final String authorUrl = "https://c.api.weibo.com/2/users/show_batch/other.json";
    private static final String interactionUrl = "https://c.api.weibo.com/2/statuses/count/biz.json";
    // Creates a historical-search task.
    private static final String createUrl = "https://c.api.weibo.com/2/search/statuses/historical/create.json";
    // Polls a historical-search task; params: task_id, timestamp, signature, access_token.
    private static final String checkUrl = "https://c.api.weibo.com/2/search/statuses/historical/check.json?task_id=%s&timestamp=%s&signature=%s&access_token=%s";
    // Local input file: one JSON object per line containing "mid"/"uid" fields.
    // Change this to your own local path.
    private static final String WEIBO_TXT_PATH = "E:\\chance-crawler-development\\crawler-dev-bootstrap\\src\\main\\java\\com\\chance\\cc\\crawler\\development\\bootstrap\\weibo\\weiboapi\\weibo.txt";

    public static void main(String[] args) {
      //  CrawlerRequestRecord interactionRecord = interaction();
       // CrawlerRequestRecord articleRecord = article();
        CrawlerRequestRecord articleRecord = check();
        CrawlerRequestRecord keywordRecord = CrawlerRequestRecord.builder()
                .startPageRequest("dcdapp_series_keyword", turnPageItem)
                .httpUrl("http://192.168.1.217:9599/v1/meta/weibo/keys?site=serachKw")
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();


        DevCrawlerController.builder()
                .triggerInfo(domain, domain, System.currentTimeMillis(), domain)
                .crawlerRequestQueue(DevCrawlerController.devRequestQueue(domain))
                .consoleResultPipeline()
                //.fileResultPipeline(null,"F:\\chance_log\\weibo\\interaction.log",false)
               // .fileResultPipeline(null,"F:\\chance_log\\weibo\\author.log",true)
                .fileResultPipeline(null, "F:\\chance_log\\weibo\\txt.log", false)
                .requestRecord(articleRecord)
               // .supportRecord(keywordRecord)
                .build("com.chance.cc.crawler.development.scripts.weibo")
                .start();
    }

    /**
     * Builds a request that polls the status of a previously created
     * historical-search task ({@code check.json}).
     *
     * <p>The endpoint requires {@code signature = md5(id + secret_key + timestamp)}.
     *
     * @return a turn-page request record tagged with the task credentials and a
     *         serialized comment-collection time filter
     */
    public static CrawlerRequestRecord check() {
        String id = "1428199813";
        String secret_key = "1c3c340b06d60cd422aa";
        String task_id = "14495777";
        long time = System.currentTimeMillis();
        // ASCII-only input, so specifying UTF-8 matches the original bytes while
        // avoiding any platform-default-charset surprise.
        String signature = DigestUtils.md5DigestAsHex((id + secret_key + time).getBytes(StandardCharsets.UTF_8));
        String access_token = "2.00o4_w1HrAaeYBedf38e38b8SnITmD";
        String url = String.format(checkUrl, task_id, time, signature, access_token);
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domain)
                .httpUrl(url)
                .httpConfig(HttpConfig.me(domain))
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 10, null))
               // .needParsed(false)
                .build();
       // requestRecord.setSkipPipeline(true);
       // requestRecord.setDownload(false);
        requestRecord.tagsCreator().bizTags().addDomain(domain);
        requestRecord.tagsCreator().bizTags().addSite(site);
        requestRecord.tagsCreator().bizTags().addCustomKV("id", id);
        requestRecord.tagsCreator().bizTags().addCustomKV("task_id", task_id);
        requestRecord.tagsCreator().bizTags().addCustomKV("secret_key", secret_key);
        requestRecord.tagsCreator().bizTags().addSiteBiz("comment");
        long starttime = 1637337600000L;

        requestRecord.tagsCreator().bizTags().addCustomKV("comment_record_filter_info",
                commentFilterJson(starttime, System.currentTimeMillis()));
        requestRecord.tagsCreator().bizTags().addCustomKV("starttime", starttime);
        requestRecord.tagsCreator().bizTags().addCustomKV("endtime", System.currentTimeMillis());
        return requestRecord;
    }

    /**
     * Serializes a {@code dateRange} filter record restricting comment collection
     * to {@code [starttime, endtime]}.
     *
     * @param starttime window start, epoch millis
     * @param endtime   window end, epoch millis
     * @return the filter record as a JSON string for the
     *         {@code comment_record_filter_info} tag
     */
    private static String commentFilterJson(long starttime, long endtime) {
        CrawlerRecord filterRecord = new CrawlerRecord();
        // Time filter applied when collecting comments.
        filterRecord.setFilter(dateRange);
        filterRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(0, new long[]{starttime, endtime}));
        return JSON.toJSONString(filterRecord);
    }

    /**
     * Builds a request that creates a historical-search task
     * ({@code create.json}). Download and pipeline are disabled: the record is a
     * carrier for the task-creation call only.
     *
     * @return a turn-page request record tagged with the comment time filter
     */
    public static CrawlerRequestRecord article() {
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domain)
                .httpUrl(createUrl)
                .httpConfig(HttpConfig.me(domain))
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 10, null))
                .needParsed(false)
                .build();
        requestRecord.setSkipPipeline(true);
        requestRecord.setDownload(false);
        requestRecord.tagsCreator().bizTags().addDomain(domain);
        requestRecord.tagsCreator().bizTags().addSite(site);
        long starttime = 1637337600000L;
        long endtime = 1637855999000L;

        // NOTE: as in the original, the serialized filter's upper bound is "now",
        // while the "endtime" tag keeps the fixed endtime value.
        requestRecord.tagsCreator().bizTags().addCustomKV("comment_record_filter_info",
                commentFilterJson(starttime, System.currentTimeMillis()));
        requestRecord.tagsCreator().bizTags().addCustomKV("starttime", starttime);
        requestRecord.tagsCreator().bizTags().addCustomKV("endtime", endtime);
        return requestRecord;
    }

    /**
     * Builds a request for author/interaction data, seeded with the uids read
     * from the local weibo.txt file.
     *
     * @return a non-downloading request record carrying the "uids" tag
     */
    public static CrawlerRequestRecord interaction() {

        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(domain)
                .httpUrl(authorUrl)
                //.httpUrl(interactionUrl)
                .httpConfig(HttpConfig.me(domain))
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.dateRange)
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 7, null))
                .build();
        requestRecord.setSkipPipeline(true);
        requestRecord.setDownload(false);
        requestRecord.tagsCreator().bizTags().addDomain(domain);
        requestRecord.tagsCreator().bizTags().addSite(site);
        StringBuffer sb = new StringBuffer();
        Set<String> set = new HashSet<>();
        try {
            mid(sb);
            uid(set);
        } catch (IOException e) {
            // Best-effort dev script: a missing input file just yields empty mids/uids.
            e.printStackTrace();
        }
        // Trim the trailing comma left by mid(). The original called
        // sb.substring(...) and discarded the result (a no-op), and would have
        // thrown StringIndexOutOfBoundsException on an empty buffer.
        if (sb.length() > 0) {
            sb.setLength(sb.length() - 1);
        }
        requestRecord.tagsCreator().bizTags().addCustomKV("uids", set);
       // requestRecord.tagsCreator().bizTags().addCustomKV("mids",sb);
        return requestRecord;
    }

    /**
     * Reads weibo.txt (one JSON object per line) and appends every "mid" value,
     * comma-separated, to {@code sb}. A trailing comma is left for the caller to
     * trim.
     *
     * @param sb buffer the mids are appended to
     * @throws IOException if the file cannot be read
     */
    public static void mid(StringBuffer sb) throws IOException {
        // try-with-resources: the original leaked the reader.
        try (BufferedReader bufferedReader = new BufferedReader(
                new InputStreamReader(new FileInputStream(WEIBO_TXT_PATH), StandardCharsets.UTF_8))) {
            String readLine;
            while ((readLine = bufferedReader.readLine()) != null) {
                // Skip blank lines BEFORE parsing; the original checked after
                // parseObject, so the guard never protected the parse.
                if (StringUtils.isBlank(readLine)) {
                    continue;
                }
                JSONObject jsonObject = JSONObject.parseObject(readLine);
                String mid = jsonObject.getString("mid");
                sb.append(mid).append(",");
            }
        }
    }

    /**
     * Reads weibo.txt (one JSON object per line) and collects every "uid" value
     * into {@code set}.
     *
     * @param set destination set for the uids
     * @throws IOException if the file cannot be read
     */
    public static void uid(Set<String> set) throws IOException {
        // try-with-resources: the original leaked the reader.
        try (BufferedReader bufferedReader = new BufferedReader(
                new InputStreamReader(new FileInputStream(WEIBO_TXT_PATH), StandardCharsets.UTF_8))) {
            String readLine;
            while ((readLine = bufferedReader.readLine()) != null) {
                // Skip blank lines BEFORE parsing (see mid()).
                if (StringUtils.isBlank(readLine)) {
                    continue;
                }
                JSONObject jsonObject = JSONObject.parseObject(readLine);
                String uid = jsonObject.getString("uid");
                set.add(uid);
            }
        }
    }


}
