package com.chance.cc.crawler.development.command.meta.keyword.bilibili;

import com.chance.cc.crawler.development.command.meta.keyword.CrawlerDomainKeywordCommand;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainKeys;
import org.junit.Test;

import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * @author lt
 * @version 1.0
 * @date 2021-02-02 11:03:21
 * @email okprog@sina.com
 */
public class BilibiliKeyword {
    /** Crawler domain every keyword in this class is registered under. */
    private static final String domain = "bilibili";
    private static final String nike_site = "nike_keyword";
    private static final String medical_site = "medical_keyword";
    // NOTE(review): host/port are hard-coded environment addresses — consider
    // externalizing them to configuration instead of source.
    private static CrawlerDomainKeywordCommand crawlerDomainKeywordCommand =
            new CrawlerDomainKeywordCommand("192.168.1.215", 9599);

    /**
     * Uploads the medical keyword list to the 192.168.1.215 command endpoint.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        String filepath = "crawler-scripts-command/src/main/java/com/chance/cc/crawler/development/command/meta/keyword/medical/medical_kw.txt";
        uploadKeywords(filepath, medical_site);
    }

    /**
     * Uploads the bilibili "simple" keyword list against the 192.168.1.217
     * command endpoint (switches the shared command instance first).
     */
    @Test
    public void simple(){
        crawlerDomainKeywordCommand = new CrawlerDomainKeywordCommand("192.168.1.217",9599);
        String site = "simple_simple";//search_keyword
        String filepath = "E:\\chance_work\\chance-crawler-development\\crawler-scripts-command\\src\\main\\java\\com\\chance\\cc\\crawler\\development\\command\\meta\\keyword\\bilibili\\bilibi-simple.txt";
        uploadKeywords(filepath, site);
    }

    /**
     * Reads a keyword file, de-duplicates its lines, and registers each unique
     * keyword (status 0) under {@link #domain} for the given site via the
     * shared {@link #crawlerDomainKeywordCommand}.
     *
     * @param filepath path of the newline-delimited keyword file
     * @param site     site identifier to attach to each keyword
     */
    private static void uploadKeywords(String filepath, String site) {
        List<String> content = getContent(filepath);
        Set<String> dedupedKeywords = new HashSet<>();
        for (String keyword : content) {
            System.out.println(keyword);
            dedupedKeywords.add(keyword);
        }
        System.out.println(dedupedKeywords.size());
        for (String keyword : dedupedKeywords) {
            CrawlerDomainKeys crawlerDomainKeys = new CrawlerDomainKeys();
            crawlerDomainKeys.setDomain(domain);
            crawlerDomainKeys.setSite(site);
            crawlerDomainKeys.setKeyword(keyword);
            crawlerDomainKeys.setStatus(0); // 0 = initial/enabled status for a freshly added keyword — TODO confirm semantics
            crawlerDomainKeywordCommand.addOrUpdateKeys(crawlerDomainKeys);
//            crawlerDomainKeywordCommand.delKeys(crawlerDomainKeys);
        }
    }

    /**
     * Reads all lines of the given file as UTF-8.
     *
     * <p>Fix: the {@code Files.lines} stream holds an open file handle and must
     * be closed — it is now wrapped in try-with-resources. The previous
     * {@code flatMap(split("\n"))} step was a no-op ({@code Files.lines} already
     * splits on line terminators) and has been removed. On I/O failure this now
     * returns an empty list instead of {@code null}, so callers' for-each loops
     * cannot throw a {@code NullPointerException}.
     *
     * @param filepath path of the file to read
     * @return the file's lines, or an empty list if the file cannot be read
     */
    public static List<String> getContent(String filepath) {
        System.out.println(filepath);
        try (Stream<String> lines = Files.lines(Paths.get(filepath), StandardCharsets.UTF_8)) {
            return lines.collect(Collectors.toList());
        } catch (IOException e) {
            e.printStackTrace();
            return Collections.emptyList();
        }
    }
}
