package util;

import org.apache.hadoop.conf.Configuration;
import java.util.HashMap;
import java.util.Properties;

/**
 * Central holder for Kafka client configuration and an HBase/ZooKeeper
 * {@link Configuration} helper, shared across the application.
 *
 * <p>Exposes four pre-populated configuration objects:
 * <ul>
 *   <li>{@link #kafka_Producer} / {@link #kafka_Consumer} — {@code HashMap}-based settings</li>
 *   <li>{@link #producer_Props} / {@link #consumer_Props} — {@code Properties}-based settings</li>
 * </ul>
 *
 * <p>NOTE(review): these fields are public, static and mutable, so they are not
 * thread-safe against concurrent modification; callers are expected to treat
 * them as read-only after class initialization.
 */
public class MyProps {
    /** Kafka producer settings as {@link Properties}. */
    public static Properties producer_Props = new Properties();
    /** Kafka consumer settings as {@link Properties}. */
    public static Properties consumer_Props = new Properties();
    /** Kafka consumer settings as a {@link HashMap}. */
    public static HashMap<String, Object> kafka_Consumer = new HashMap<>();
    /** Kafka producer settings as a {@link HashMap}. */
    public static HashMap<String, Object> kafka_Producer = new HashMap<>();

    /**
     * Populates {@code conf} with the HBase ZooKeeper quorum and client port.
     *
     * @param conf the Hadoop configuration to mutate
     * @return the same {@code conf} instance, for chaining
     */
    public static Configuration setConf(Configuration conf) {
        conf.set("hbase.zookeeper.quorum", "hadoop106,hadoop107,hadoop108");
        // FIX: key was misspelled "hbae.zookeeper.property.client", which HBase
        // silently ignored; the correct client-port key is used here.
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        return conf;
    }

    static {
        kafka_Producer.put("bootstrap.servers", "hadoop106:9092,hadoop107:9092,hadoop108:9092");
        // acks: "0" = fire-and-forget; "1" = leader ack only; "all" = every
        // in-sync replica must persist the record before success is reported.
        kafka_Producer.put("acks", "all");
        // Retry indefinitely on transient send failures.
        kafka_Producer.put("retries", Integer.MAX_VALUE);
        // Maximum batch size in bytes.
        kafka_Producer.put("batch.size", 16384);
        // How long (ms) to wait for a batch to fill before sending anyway.
        kafka_Producer.put("linger.ms", 1);
        // Total memory the producer may use for buffering (32 MB, the default).
        kafka_Producer.put("buffer.memory", 33554432);
        kafka_Producer.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        kafka_Producer.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        kafka_Consumer.put("bootstrap.servers", "hadoop106:9092,hadoop107:9092,hadoop108:9092");
        kafka_Consumer.put("group.id", "com-test");
        // Start from the beginning of the topic when no committed offset exists.
        kafka_Consumer.put("auto.offset.reset", "earliest");
        kafka_Consumer.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        kafka_Consumer.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        producer_Props.setProperty("bootstrap.servers", "hadoop106:9092,hadoop107:9092,hadoop108:9092");
        // FIX: key was misspelled "ack", so the intended acks=all was never
        // applied and the producer ran with the default acknowledgment level.
        producer_Props.setProperty("acks", "all");
        producer_Props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        producer_Props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        // FIX: removed "auto.offset.reset" here — it is a consumer-only setting
        // and was ignored (with an "unused config" warning) by the producer.

        consumer_Props.setProperty("bootstrap.servers", "hadoop106:9092,hadoop107:9092,hadoop108:9092");
        consumer_Props.setProperty("group.id", "com-test");
        consumer_Props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        consumer_Props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        consumer_Props.put("auto.offset.reset", "earliest");
    }
}
