package com.chance.cc.crawler.prod.command.meta.yyjjb;

import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainKeys;
import com.chance.cc.crawler.prod.command.meta.CommonCrawlerKeywordScript;

import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * created by ljl 20210630
 * Yiyao Jingji Bao (医药经济报, "Medicine Economic News") — keyword-search section.
 *
 * <p>One-shot maintenance script: loads keywords from a text file (one keyword
 * per line), de-duplicates them, and registers them for the {@code yyjjbSearch}
 * crawler domain via the inherited {@code keywordCommand}.
 */
public class YjjjbKeywordScript extends CommonCrawlerKeywordScript {

    /** Crawler domain all keywords produced by this script are registered under. */
    private static final String domain = "yyjjbSearch";

    /**
     * Entry point. Runs the keyword import for the main search site.
     * Uncomment {@link #adhoc()} to import the ad-hoc keyword list instead.
     */
    public static void main(String[] args) {
        searchKw();
//        adhoc();
    }


    /** Imports the medical keyword list for the {@code search_keyword} site. */
    private static void searchKw() {
        String site = "search_keyword";
        String filePath = "crawler-scripts-command-prod/src/main/java/com/chance/cc/crawler/prod/command/meta/yyjjb/medical_kw.txt";
        List<String> keywords = duplicateKeywords(filePath);
        addKeywords(site, keywords);
        // Toggle to remove the same keyword set instead of adding it:
//        delKeywords(site,keywords);
    }


    /** Imports the ad-hoc keyword list for the {@code adhoc-kw} site. */
    private static void adhoc() {
        String site = "adhoc-kw";
        String filePath = "crawler-scripts-command-prod/src/main/java/com/chance/cc/crawler/prod/command/meta/yishengzhan/adhoc-kw.txt";
        List<String> keywords = duplicateKeywords(filePath);
        addKeywords(site, keywords);
        // Toggle to remove the same keyword set instead of adding it:
//        delKeywords(site,keywords);
    }


    /**
     * Reads the file and removes duplicate keywords.
     *
     * @param filePath path to the keyword file, one keyword per line
     * @return the distinct keywords; a {@link LinkedHashSet} is used so the
     *         original file order is preserved deterministically
     */
    private static List<String> duplicateKeywords(String filePath) {
        List<String> content = getContent(filePath);
        System.out.println("去重前: " + content.size());
        Set<String> duplicate = new LinkedHashSet<>(content);
        return new ArrayList<>(duplicate);
    }


    /**
     * Registers every keyword for the given site under {@link #domain}.
     *
     * @param site     site identifier within the crawler domain
     * @param keywords keywords to add or update
     */
    private static void addKeywords(String site, List<String> keywords) {
        for (String keyword : keywords) {
            System.out.println(keyword);
            keywordCommand.addOrUpdateKeys(buildKeys(site, keyword));
        }
    }


    /**
     * Removes every keyword for the given site under {@link #domain}.
     *
     * @param site     site identifier within the crawler domain
     * @param keywords keywords to delete
     */
    private static void delKeywords(String site, List<String> keywords) {
        for (String keyword : keywords) {
            System.out.println(keyword);
            keywordCommand.delKeys(buildKeys(site, keyword));
        }
    }


    /**
     * Assembles the key record shared by add and delete operations.
     * NOTE(review): status is always 0 here — presumably "active"; confirm
     * against the CrawlerDomainKeys contract.
     */
    private static CrawlerDomainKeys buildKeys(String site, String keyword) {
        CrawlerDomainKeys crawlerDomainKeys = new CrawlerDomainKeys();
        crawlerDomainKeys.setDomain(domain);
        crawlerDomainKeys.setSite(site);
        crawlerDomainKeys.setKeyword(keyword);
        crawlerDomainKeys.setStatus(0);
        return crawlerDomainKeys;
    }


    /**
     * Reads all non-empty lines from the given file.
     *
     * <p>Reads as UTF-8 explicitly (the previous {@code Charset.defaultCharset()}
     * mis-decodes the Chinese keyword files on platforms whose default charset
     * is not UTF-8, e.g. GBK on Chinese Windows).
     *
     * @param filepath path of the file to read
     * @return the non-empty lines of the file; an empty list if reading fails
     *         (never {@code null}, so callers can iterate safely)
     */
    public static List<String> getContent(String filepath) {
        System.out.println(filepath);
        // Files.lines is backed by an open file channel and must be closed,
        // otherwise the handle leaks — hence try-with-resources.
        try (Stream<String> lines = Files.lines(Paths.get(filepath), StandardCharsets.UTF_8)) {
            return lines
                    .filter(line -> !line.isEmpty())
                    .collect(Collectors.toList());
        } catch (IOException e) {
            e.printStackTrace();
            return new ArrayList<>();
        }
    }
}
