package com.chance.cc.crawler.development.command.job.domain.dxy;

import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.development.command.job.CrawlerJobCommand;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;

/**
 * @author lt
 * @version 1.0
 * @date 2021-02-05 17:55:41
 * @email okprog@sina.com
 */
/**
 * Shared configuration for DXY ("dxy") crawler jobs: meta-service endpoint,
 * Kafka topic names, the outbound HTTP proxy, and the seed request record
 * used to pull search keywords from the meta service.
 *
 * <p>NOTE(review): the public static fields below are intentionally left
 * non-final to preserve the existing interface — confirm no caller reassigns
 * them before tightening.
 */
public class DXYCommonScript {
    /** Site identifier used as prefix for meta-service keys and page names. */
    public static final String domain = "dxy";

    /** Host/port of the internal crawler meta service. */
    public static final String metaServiceIp = "192.168.1.215";
    public static final int metaServicePort = 9599;
    /** Base HTTP URL of the meta service (no trailing slash). */
    public static final String metaServiceHttpPrefix = "http://"+metaServiceIp+":"+metaServicePort;

    // Kafka sinks; real-time topic is intentionally blank (real-time jobs disabled?) — TODO confirm
    public static final String kafkaTopicForRealTimeJob = "";
    public static final String kafkaTopicForTraceJob = "tmp_medical_treatment";

    /** Command handle pointing at the same meta service as the HTTP prefix above. */
    public static CrawlerJobCommand metaServiceCommand = new CrawlerJobCommand(metaServiceIp,metaServicePort);

    /** Outbound proxy (abuyun dynamic HTTP proxy) shared by all DXY jobs. */
    public static Proxy proxy = new Proxy();
    static {
        // Proxy configuration.
        // SECURITY(review): credentials are hard-coded in source; move them to
        // environment variables or a secrets store and rotate these values.
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }

    /** Seed record that fetches the search-keyword list from the meta service. */
    public static CrawlerRequestRecord keywordCrawlerRecord = CrawlerRequestRecord.builder()
            .startPageRequest(domain + "_keywords",turnPageItem)
            // Build from metaServiceHttpPrefix instead of re-hard-coding host:port;
            // this also fixes the doubled slash ("9599//v1") in the original URL.
            .httpUrl(metaServiceHttpPrefix + "/v1/meta/"+domain+"/keys?site=search")
            .requestLabelTag(supportSource)
            .requestLabelTag(internalDownload)
            .build();
}
