package com.chance.cc.crawler.prod.command.job.domain.vm.dcdapp;

import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.prod.command.job.CrawlerJobCommand;

/**
 * @author lt
 * @version 1.0
 * @date 2021-05-13 13:55:33
 * @email okprog@sina.com
 */
/**
 * Static configuration holder for the DCD-app ("dcdapp") common crawler scheduler job.
 *
 * <p>Groups the meta-service endpoint, the user-agent source URL, the Kafka topics the
 * job publishes to, and the outbound HTTP proxy used by the downloader. All values are
 * exposed as public static fields and read by the job wiring elsewhere in the project.
 */
public class DCDAppCommonCrawlerSchedulerJob {
    // NOTE(review): left non-final in case callers reassign these at runtime — if not,
    // they should be made final like the constants below.
    /** Crawler domain identifier used to route this job's configuration. */
    public static String domain = "dcdapp";
    /** Site/channel identifier within the domain. */
    public static String site = "Common_A_V";

    /** Host of the metadata service this job talks to. */
    public static final String metaServiceIp = "192.168.1.217";
    /** Port of the metadata service. */
    public static final int metaServicePort = 9599;
    /** Convenience base URL for HTTP calls to the metadata service. */
    public static final String metaServiceHttpPrefix = "http://" + metaServiceIp + ":" + metaServicePort;

    /** Endpoint returning a pool of browser user-agent strings. */
    public static final String userAgentUrl = "https://fake-useragent.herokuapp.com/browsers/0.1.11";

    /** Kafka topic for real-time crawl results. */
    public static final String kafkaTopicForRealTimeJob = "vm_auto";
    /** Kafka topic for trace/comment crawl results. */
    public static final String kafkaTopicForTraceJob = "vm_auto_comment";

    /** Pre-built command pointing at the metadata service endpoint above. */
    public static CrawlerJobCommand metaServiceCommand = new CrawlerJobCommand(metaServiceIp, metaServicePort);

    /** Shared outbound HTTP proxy, configured once in the static initializer below. */
    public static Proxy proxy = new Proxy();
    static {
        // Proxy configuration (abuyun dynamic HTTP proxy).
        // SECURITY(review): the credentials were hard-coded in source. They remain the
        // fallback for backward compatibility, but can now be overridden per deployment
        // via a JVM system property or environment variable without a code change.
        // These secrets should be rotated and moved out of version control entirely.
        proxy.setHost(configValue("DCD_PROXY_HOST", "dcd.proxy.host", "http-dyn.abuyun.com"));
        proxy.setPort(Integer.parseInt(configValue("DCD_PROXY_PORT", "dcd.proxy.port", "9020")));
        proxy.setUsername(configValue("DCD_PROXY_USER", "dcd.proxy.user", "HL89Q19E86E2987D"));
        proxy.setPassword(configValue("DCD_PROXY_PASSWORD", "dcd.proxy.password", "71F33D94CE5F7BF2"));
    }

    /**
     * Resolves a configuration value: JVM system property first, then environment
     * variable, then the compiled-in fallback. Blank values are treated as unset.
     *
     * @param envKey   environment variable name to consult
     * @param propKey  system property name to consult (takes precedence)
     * @param fallback value used when neither source provides a non-blank value
     * @return the resolved, non-null configuration value
     */
    private static String configValue(String envKey, String propKey, String fallback) {
        String value = System.getProperty(propKey);
        if (value == null || value.isEmpty()) {
            value = System.getenv(envKey);
        }
        return (value == null || value.isEmpty()) ? fallback : value;
    }
}
