package com.chance.cc.crawler.prod.command.job.domain.commerce.dangdang;

import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.prod.command.job.CrawlerJobCommand;

/**
 * Static configuration holder for the Dangdang (当当网) e-commerce crawler job:
 * meta-service endpoint, Kafka output topics, and the shared HTTP proxy used
 * by the downloader.
 *
 * @author songding
 * @since 2021/10/19
 */
public class DangdangCommonCrawlerScheduleJob {

    /** Crawler domain identifier for this site. */
    public static final String domain = "dangdang";

    // NOTE(review): hardcoded internal IP/port — consider externalizing to configuration.
    public static final String metaServiceIp = "192.168.1.217";
    public static final int metaServicePort = 9599;
    /** HTTP base URL of the meta service, derived from the IP/port above. */
    public static final String metaServiceHttpPrefix = "http://" + metaServiceIp + ":" + metaServicePort;

    /** Kafka topic for regular e-commerce crawl output. */
    public static final String kafkaTopic = "e_commerce";
    /** Kafka topic for supplemental crawl output. */
    public static final String kafkaTopic_supplement = "tmp_e_commerce_supplement";

    /** Shared job command targeting the meta service; final — assigned once, never reassigned here. */
    public static final CrawlerJobCommand metaServiceCommand = new CrawlerJobCommand(metaServiceIp, metaServicePort);

    /**
     * Shared proxy for this crawler (abuyun dynamic HTTP proxy), configured once below.
     * SECURITY NOTE(review): credentials are hardcoded in source — move to a secure
     * config/secret store and rotate these values.
     */
    public static final Proxy proxy = new Proxy();

    static {
        // Proxy configuration.
        // Alternate credential pair kept for reference:
        //   H5168QRFNIU3804D
        //   5F6B3610BB719FAA
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }
}
