package com.feidee.fdhadoop.constant;


/**
 * Application-wide configuration constants: log-collection thread-pool sizing,
 * Kafka consumer/producer settings, log-collection HTTP endpoints, ZooKeeper
 * quorum addresses, HBase cluster addresses, and HBase operation-type names.
 *
 * <p>Non-instantiable holder; every member is {@code static final} so the
 * values cannot be reassigned at runtime.
 */
public final class Constant {

    /** Prevents instantiation of this constants holder. */
    private Constant() {
    }

    /** Maximum queue size for the log-collection thread pool. */
    public static final int LOGCOLLECT_THREADPOOL_MAX_QUEUESIZE = 100000;

    /** Name of the HBase data-backup target. */
    public static final String HBASE_DATA_BACKUP = "hbase_data_backup";

    // Kafka consumer config
    public static final String KAFKA_ENABLE_AUTO_COMMIT = "true";
    public static final String KAFKA_AUTO_COMMIT_INTERVAL_MS = "1000";
    public static final String KAFKA_SESSION_TIMEOUT_MS = "30000";

    // Kafka producer config
    public static final String KAFKA_ACKS = "all";
    public static final String KAFKA_RETRIES = "0";
    public static final String KAFKA_BATCH_SIZE = "16384";
    public static final String KAFKA_LINGER_MS = "1";
    public static final String KAFKA_BUFFER_MEMORY = "33554432";

    // Log-collection HTTP endpoints (production vs. test environment)
    public static final String LOG_URL_LIVE = "http://data.feidee.net/logCollect/events?em=n";
    public static final String LOG_URL_TEST = "http://infras-dev.feidee.net/logCollect/events?em=n";

    // ZooKeeper quorum addresses for the config updater (production vs. test)
    public static final String CONFIG_UPDATER_ZK_URL_LIVE = "10.200.5.83,10.200.5.84,10.200.5.85:2181";
    public static final String CONFIG_UPDATER_ZK_URL_TEST = "10.201.7.113,10.201.7.114,10.201.7.115:2181";

    /** Default HBase cluster address. */
    public static final String HBASE_HA_DEFAULT_URL = "10.200.8.25,10.200.8.29,10.200.8.30";

    // Identifiers used when emitting hbaseutil request logs
    public static final String LOG_APT = "bigdata";
    public static final String LOG_BUS_HBASEUTIL = "hbaseutil_request_log";

    // HBase operation-type names used in request logging
    public static final String HBASE_OP_TYPE_GET = "get";
    public static final String HBASE_OP_TYPE_PUT = "put";
    public static final String HBASE_OP_TYPE_DEL = "del";
}
