package com.chance.cc.crawler.development.bootstrap.taobao;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.downloader.http.HttpClientDownloader;
import com.chance.cc.crawler.core.downloader.http.HttpClientGenerator;
import com.chance.cc.crawler.core.downloader.proxy.SiteConfigProxyProvider;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.record.CrawlerResultRecord;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.development.bootstrap.jingdong.JingdongSearchProductInfoImport;
import com.chance.cc.crawler.development.bootstrap.tianmao.TianmaoSearchProductInfoImport;
import com.chance.cc.crawler.development.utils.RedisReader;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;

import java.io.BufferedReader;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.comment;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;

/**
 * @author bx
 * @date 2020/12/24 0024 18:23
 */
/**
 * One-off import/driver tool for e-commerce product exports.
 *
 * <p>It reads locally exported product list files (Taobao / Tianmao /
 * Jingdong, tab-separated or JSON-per-line), converts each row into a
 * {@link CrawlerResultRecord} and pushes it onto the per-domain Redis list
 * queue; it can also trigger the crawler service's queue-to-MySQL sync over
 * HTTP. Run via {@link #main(String[])}, uncommenting the step(s) to execute.
 *
 * @author bx
 * @date 2020/12/24 0024 18:23
 */
public class TaobaoSearchProductInfoImport {

    /** Redis list queue that buffers Taobao search-result records. */
    public static final String listQueue = "taobao_search_result";

    /** Ad-hoc entry point: uncomment the task(s) to run for a given batch. */
    public static void main(String[] args) throws IOException {
//        convertToRedis();


//        eCommerceProductImport();

//        jdProductProcess("F:\\chance_log\\keywords\\电商商品链接\\20211020\\jd_product_20211020.txt");
//        tbProductProcess("F:\\chance_log\\keywords\\电商商品链接\\20211020\\tb_product_20211020.txt");
        tmProductProcess("D:\\chance\\采集\\电商列表页\\20211115\\tm_product_20211115.txt","ts");
//        tmProductProcess("F:\\chance_log\\keywords\\电商商品链接\\20211020\\tm_product_20211020.txt",null);
//
//        syncToMysql("jingdong",JingdongSearchProductInfoImport.listQueue);
//        syncToMysql("taobao",listQueue);
//        syncToMysql("tianmao",TianmaoSearchProductInfoImport.listQueue);
        syncToMysql("tianmao",TianmaoSearchProductInfoImport.listQueue,"ts");
    }

    /**
     * Seeds the Taobao comment-crawl queue: builds a template comment request
     * record and POSTs it as JSON to the local crawler service's queue-init
     * endpoint, then prints the service's raw response.
     */
    private static void init() {
        // Template request: not downloaded or piped itself (download=false,
        // skipPipeline=true) — it only parameterizes the remote queue.
        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest("taobao", CrawlerEnum.CrawlerRequestType.turnPage)
                .httpUrl("https://rate.taobao.com/feedRateList.htm")
                .releaseTime(System.currentTimeMillis())
                .needWashed(false)
                .needParsed(false)
                .resultLabelTag(comment)
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                .addFilterInfo(FilterUtils.memoryFilterKeyInfo("taobao"))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 1, null))
                .build();
        requestRecord.setDownload(false);
        requestRecord.setSkipPipeline(true);

        HttpClientDownloader downloader = new HttpClientDownloader();
        downloader.setClientGenerator(new HttpClientGenerator());
        downloader.setProxyProvider(new SiteConfigProxyProvider());

        String requestQueueName = StringUtils.joinWith("-", "crawler", "taobao", "comment", "queue");
        CrawlerRequestRecord initCommentCrawlerRecord = CrawlerRequestRecord.builder()
                .startPageRequest("taobao_comment_keywords", turnPageItem)
                .httpUrl("http://localhost:9599/crawler/domain/common/api/v1/taobao/results/queue/init?requestQueueName=" + requestQueueName)
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();
        // The template record travels as the POST body of the init call.
        HttpRequestBody jsonBody = HttpRequestBody.json(JSON.toJSONString(requestRecord), "utf-8");
        initCommentCrawlerRecord.getHttpRequest().setMethod(HttpConstant.Method.POST);
        initCommentCrawlerRecord.getHttpRequest().setRequestBody(jsonBody);

        HttpPage download = downloader.download(initCommentCrawlerRecord.getHttpRequest(), initCommentCrawlerRecord.getHttpConfig());
        System.out.println(download.getRawText());
    }

    /**
     * Converts an exported Taobao product file (one JSON object per line)
     * into crawler result records and pushes them onto {@link #listQueue}
     * in batches of ~1000, printing the total line count at the end.
     */
    private static void convertToRedis() throws IOException {
        RedisReader redisReader = new RedisReader("192.168.1.215", 6379, 4);
        List<String> records = new ArrayList<>();
        int count = 0;
        // NOTE(review): FileReader uses the platform default charset; the
        // export file is assumed to match it — confirm on the machine running this.
        try (BufferedReader br = new BufferedReader(
                new FileReader("F:\\chance_log\\keywords\\电商商品链接\\列表\\20210124_taobao_img_url.json"))) {
            String line;
            while ((line = br.readLine()) != null) {
                count++;
                Json json = new Json(line);
                String kw = json.jsonPath("$.关键词").get();
                String itemId = json.jsonPath("$.itemId").get();
                String userId = json.jsonPath("$.sellerId").get();

                Map<String, String> contentMap = new HashMap<>();
                contentMap.put("detailUrl", json.jsonPath("$.URL").get());
                contentMap.put("itemId", itemId);
                contentMap.put("userId", userId);
                contentMap.put("itemName", json.jsonPath("$.商品名").get());
                contentMap.put("price", json.jsonPath("$.价格").get());
                contentMap.put("sales", json.jsonPath("$.月销量").get());
                contentMap.put("itemImage", json.jsonPath("$.商品图片").get());
                contentMap.put("shopName", json.jsonPath("$.shop_name").get());
                contentMap.put("keyword", kw);
                // dataId identifies the (domain, item, seller) triple.
                contentMap.put("dataId", StringUtils.joinWith("-", "taobao", itemId, userId));

                records.add(toResultRecordJson(contentMap, kw));
                if (records.size() > 1000) {
                    redisReader.listPush(listQueue, records.toArray(new String[0]));
                    records.clear();
                }
            }
        }

        // Flush the final partial batch, if any.
        if (!records.isEmpty()) {
            redisReader.listPush(listQueue, records.toArray(new String[0]));
        }

        System.out.println("总条数：" + count);
    }

    /** Triggers a queue-to-MySQL sync for the domain without a site filter. */
    private static void syncToMysql(String domain, String resultQueue) throws IOException {
        syncToMysql(domain, resultQueue, null);
    }

    /**
     * Calls the crawler service's sync endpoint, which drains
     * {@code resultQueue} into MySQL for the given domain, and prints the
     * raw HTTP response.
     *
     * @param domain      crawler domain, e.g. "taobao" / "tianmao" / "jingdong"
     * @param resultQueue Redis list queue to drain
     * @param site        optional site discriminator; appended as a query
     *                    parameter only when non-blank
     */
    private static void syncToMysql(String domain, String resultQueue, String site) throws IOException {
        HttpClientDownloader downloader = new HttpClientDownloader();
        downloader.setClientGenerator(new HttpClientGenerator());
        downloader.setProxyProvider(new SiteConfigProxyProvider());

        String url = "http://192.168.1.217:9599/crawler/domain/common/api/v1/" + domain + "/search/results/sync?" +
                "resultQueue=" + resultQueue;
        if (StringUtils.isNotBlank(site)) {
            url += "&site=" + site;
        }
        HttpRequest httpRequest = new HttpRequest();
        httpRequest.setUrl(url);
        HttpPage response = downloader.download(httpRequest, HttpConfig.me(domain));
        System.out.println(response.getRawText());
    }

    /**
     * Splits a combined product export into per-domain files (taobao /
     * tianmao / jingdong), keyed by the tab-separated domain column
     * (index 12). Lines with an unknown domain are printed for inspection.
     */
    private static void eCommerceProductImport() throws IOException {
        List<String> tbProduct = new ArrayList<>();
        List<String> tmProduct = new ArrayList<>();
        List<String> jdProduct = new ArrayList<>();

        try (BufferedReader bufferedReader = new BufferedReader(
                new FileReader("D:\\chance\\采集\\电商列表页\\20211115\\e_commerce_product_20211115.txt"));
             FileOutputStream tbOutputStream = new FileOutputStream("D:\\chance\\采集\\电商列表页\\20211115\\tb_product_20211115.txt", true);
             FileOutputStream tmOutputStream = new FileOutputStream("D:\\chance\\采集\\电商列表页\\20211115\\tm_product_20211115.txt", true);
             FileOutputStream jdOutputStream = new FileOutputStream("D:\\chance\\采集\\电商列表页\\20211115\\jd_product_20211115.txt", true)) {

            String line;
            while ((line = bufferedReader.readLine()) != null) {
                String domain = line.split("\t")[12];
                if ("taobao".equals(domain)) {
                    tbProduct.add(line);
                } else if ("tianmao".equals(domain)) {
                    tmProduct.add(line);
                } else if ("jingdong".equals(domain)) {
                    jdProduct.add(line);
                } else {
                    // Unknown domain — surface the row for manual inspection.
                    System.out.println(line);
                }

                // Flush each buffer once it exceeds 5000 lines to bound memory.
                if (tbProduct.size() > 5000) {
                    IOUtils.writeLines(tbProduct, "\r\n", tbOutputStream, StandardCharsets.UTF_8);
                    tbProduct.clear();
                }
                if (tmProduct.size() > 5000) {
                    IOUtils.writeLines(tmProduct, "\r\n", tmOutputStream, StandardCharsets.UTF_8);
                    tmProduct.clear();
                }
                if (jdProduct.size() > 5000) {
                    IOUtils.writeLines(jdProduct, "\r\n", jdOutputStream, StandardCharsets.UTF_8);
                    jdProduct.clear();
                }
            }

            // Write whatever remains in each buffer.
            if (!tbProduct.isEmpty()) {
                IOUtils.writeLines(tbProduct, "\r\n", tbOutputStream, StandardCharsets.UTF_8);
            }
            if (!tmProduct.isEmpty()) {
                IOUtils.writeLines(tmProduct, "\r\n", tmOutputStream, StandardCharsets.UTF_8);
            }
            if (!jdProduct.isEmpty()) {
                IOUtils.writeLines(jdProduct, "\r\n", jdOutputStream, StandardCharsets.UTF_8);
            }
        }
    }

    /**
     * Parses a Jingdong product export (tab-separated) into result records
     * and pushes them to the Jingdong list queue, high-priority rows
     * (column 17 == "1") first. Malformed lines are printed and skipped.
     */
    private static void jdProductProcess(String filePath) throws IOException {
        RedisReader redisReader = new RedisReader("192.168.1.215", 6379, 4);
        List<String> highPriority = new ArrayList<>();
        List<String> lowPriority = new ArrayList<>();

        try (BufferedReader bufferedReader = new BufferedReader(new FileReader(filePath))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                try {
                    // limit=-1 keeps trailing empty columns.
                    String[] lineArray = line.split("\t", -1);
                    Map<String, String> productMap = new HashMap<>();
                    productMap.put("itemId", lineArray[4]);
                    productMap.put("itemName", lineArray[6]);
                    productMap.put("comments", lineArray[1]);
                    productMap.put("price", lineArray[9]);
                    productMap.put("shopName", lineArray[11]);
                    productMap.put("detailUrl", lineArray[3]);

                    String keywords = normalizeKeywords(lineArray[7]);
                    productMap.put("keyword", keywords);
                    productMap.put("brand", lineArray[0]);
                    productMap.put("itemImage", lineArray[5]);
                    productMap.put("dataId", StringUtils.joinWith("-", "jingdong", lineArray[4]));

                    String recordJson = toResultRecordJson(productMap, keywords);
                    if ("1".equals(lineArray[17])) {
                        highPriority.add(recordJson);
                    } else {
                        lowPriority.add(recordJson);
                    }
                    System.out.println("jd process item count:" + (highPriority.size() + lowPriority.size()));
                } catch (Exception e) {
                    // Best-effort import: report the bad line and keep going.
                    System.out.println(line);
                    e.printStackTrace();
                }
            }
        }

        pushAll(redisReader, JingdongSearchProductInfoImport.listQueue, highPriority, lowPriority);
    }

    /**
     * Parses a Taobao product export (tab-separated) into result records and
     * pushes them to {@link #listQueue}, high-priority rows first. Malformed
     * lines are printed and skipped.
     */
    private static void tbProductProcess(String filePath) throws IOException {
        RedisReader redisReader = new RedisReader("192.168.1.215", 6379, 4);
        List<String> highPriority = new ArrayList<>();
        List<String> lowPriority = new ArrayList<>();

        try (BufferedReader bufferedReader = new BufferedReader(new FileReader(filePath))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                try {
                    String[] lineArray = line.split("\t", -1);
                    Map<String, String> productMap = new HashMap<>();
                    productMap.put("itemId", lineArray[4].trim());
                    productMap.put("itemName", lineArray[6]);
                    productMap.put("price", lineArray[9]);
                    productMap.put("shopName", lineArray[11]);
                    productMap.put("detailUrl", lineArray[3]);

                    String keywords = normalizeKeywords(lineArray[7]);
                    productMap.put("keyword", keywords);
                    productMap.put("userId", lineArray[13].trim());
                    productMap.put("sales", lineArray[10]);
                    productMap.put("itemImage", lineArray[5]);
                    productMap.put("dataId", StringUtils.joinWith("-", "taobao", lineArray[4].trim(), lineArray[13].trim()));

                    String recordJson = toResultRecordJson(productMap, keywords);
                    if ("1".equals(lineArray[17])) {
                        highPriority.add(recordJson);
                    } else {
                        lowPriority.add(recordJson);
                    }
                } catch (Exception e) {
                    // Best-effort import: report the bad line and keep going.
                    System.out.println(line);
                    e.printStackTrace();
                }
            }
        }

        pushAll(redisReader, listQueue, highPriority, lowPriority);
    }

    /**
     * Parses a Tianmao product export (tab-separated) into result records and
     * pushes them to the Tianmao list queue, high-priority rows first.
     * Malformed lines are printed and skipped.
     *
     * @param filePath export file, one tab-separated product per line
     * @param site     optional site discriminator; when non-blank it is woven
     *                 into the record's dataId ("tianmao-{site}-item-seller")
     */
    private static void tmProductProcess(String filePath, String site) throws IOException {
        RedisReader redisReader = new RedisReader("192.168.1.215", 6379, 4);
        List<String> highPriority = new ArrayList<>();
        List<String> lowPriority = new ArrayList<>();

        try (BufferedReader bufferedReader = new BufferedReader(new FileReader(filePath))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                try {
                    String[] lineArray = line.split("\t", -1);
                    Map<String, String> productMap = new HashMap<>();
                    productMap.put("itemId", lineArray[4].trim());
                    productMap.put("itemName", lineArray[6]);
                    productMap.put("price", lineArray[9]);
                    productMap.put("shopName", lineArray[11]);
                    productMap.put("detailUrl", lineArray[3]);

                    String keywords = normalizeKeywords(lineArray[7]);
                    productMap.put("keyword", keywords);
                    productMap.put("userId", lineArray[13].trim());
                    productMap.put("sales", lineArray[10]);
                    productMap.put("itemImage", lineArray[5]);
                    String dataId = StringUtils.joinWith("-", "tianmao", lineArray[4].trim(), lineArray[13].trim());
                    if (StringUtils.isNotBlank(site)) {
                        dataId = StringUtils.joinWith("-", "tianmao", site, lineArray[4].trim(), lineArray[13].trim());
                    }
                    productMap.put("dataId", dataId);

                    String recordJson = toResultRecordJson(productMap, keywords);
                    if ("1".equals(lineArray[17])) {
                        highPriority.add(recordJson);
                    } else {
                        lowPriority.add(recordJson);
                    }
                    System.out.println("tm process item count:" + (highPriority.size() + lowPriority.size()));
                } catch (Exception e) {
                    // Best-effort import: report the bad line and keep going.
                    System.out.println(line);
                    e.printStackTrace();
                }
            }
        }

        pushAll(redisReader, TianmaoSearchProductInfoImport.listQueue, highPriority, lowPriority);
    }

    /**
     * Normalizes the raw keyword column into a JSON array string.
     * Blank input becomes "[]"; CSV-style quoting (surrounding quotes and
     * doubled inner quotes) is stripped before re-serializing as a JSON list.
     */
    private static String normalizeKeywords(String keywords) {
        if (StringUtils.isBlank(keywords)) {
            return JSON.toJSONString(new ArrayList<>());
        }
        if (keywords.startsWith("\"") && keywords.endsWith("\"")) {
            keywords = keywords.substring(1, keywords.length() - 1);
        }
        keywords = keywords.replaceAll("\"\"", "\"");
        return JSON.toJSONString(JSON.parseObject(keywords, List.class));
    }

    /**
     * Wraps a product map in a {@link CrawlerResultRecord} (content nested
     * inside a {@link CrawlerData}, keyword stored as a biz tag) and returns
     * its JSON form.
     */
    private static String toResultRecordJson(Map<String, String> productMap, String keywords) {
        CrawlerResultRecord crawlerResultRecord = new CrawlerResultRecord();
        CrawlerData crawlerData = new CrawlerData();
        crawlerData.setCrawlerContent(JSON.toJSONString(productMap));
        crawlerResultRecord.tagsCreator().bizTags().addCustomKV("keyword", keywords);
        crawlerResultRecord.setJsonStr(JSON.toJSONString(crawlerData));
        return JSON.toJSONString(crawlerResultRecord);
    }

    /** Pushes high-priority records first, then low-priority, one at a time. */
    private static void pushAll(RedisReader redisReader, String queue,
                                List<String> highPriority, List<String> lowPriority) throws IOException {
        for (String record : highPriority) {
            redisReader.listPush(queue, record);
        }
        for (String record : lowPriority) {
            redisReader.listPush(queue, record);
        }
    }
}
