package com.feidee.fdhadoop.kafka;

import com.feidee.fdcommon.configuration.CustomConfiguration;
import com.feidee.fdcommon.constant.CommonConstant;
import com.feidee.fdhadoop.constant.Constant;
import com.feidee.fdhadoop.logCollect.LogCollectService;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.log4j.Logger;

import java.util.Arrays;
import java.util.Properties;

public class KafkaUtil {

	// BUG fix: was Logger.getLogger(LogCollectService.class), which misattributed
	// every log line from this class to LogCollectService.
	private static final Logger logger = Logger.getLogger(KafkaUtil.class);

	/** Static utility class — not instantiable. */
	private KafkaUtil() {
	}

//	static {
//		KerberosAuthentication.authenticate();
//	}

	/**
	 * Builds a String-keyed / String-valued producer. Tunables (acks, retries,
	 * batch.size, linger.ms, buffer.memory) are read from CustomConfiguration,
	 * falling back to the defaults in Constant.
	 *
	 * @param brokers comma-separated bootstrap.servers list, e.g. "h1:9092,h2:9092"
	 * @return a new KafkaProducer; the caller owns it and must close() it
	 */
	public static KafkaProducer<String, String> getProducer(String brokers) {
		logger.info("get kafka producer~");
		Properties props = new Properties();
		props.put("bootstrap.servers", brokers);
		props.put("acks", CustomConfiguration.getString("kafka.acks", Constant.KAFKA_ACKS));
		props.put("retries", CustomConfiguration.getString("kafka.retries", Constant.KAFKA_RETRIES));
		props.put("batch.size", CustomConfiguration.getString("kafka.batch.size", Constant.KAFKA_BATCH_SIZE));
		props.put("linger.ms", CustomConfiguration.getString("kafka.linger.ms", Constant.KAFKA_LINGER_MS));
		props.put("buffer.memory", CustomConfiguration.getString("kafka.buffer.memory", Constant.KAFKA_BUFFER_MEMORY));
		props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
//		if (CommonConstant.ENV_TEST.equals(CustomConfiguration.getString(CommonConstant.ENV_KEY, CommonConstant.ENV_LIVE))) {
//			props.put("security.protocol", "SASL_PLAINTEXT");
//			props.put("sasl.kerberos.service.name", "kafka");
//			props.put("sasl.kerberos.min.time.before.relogin", CustomConfiguration.getString("sasl.kerberos.min.time.relogin", "300000"));
//			logger.info("======> Common-Hadoop-Producer: kafka初始化添加kerberos校验权限 <=======");
//		}
		logger.info("kafka producer config:" + props.toString());
		return new KafkaProducer<>(props);
	}

	/**
	 * Builds a String-keyed / String-valued consumer. Poll/commit tunables are
	 * read from CustomConfiguration, falling back to the defaults in Constant.
	 *
	 * @param brokers    comma-separated bootstrap.servers list
	 * @param groupid    consumer group id
	 * @param autoCommit whether offsets are committed automatically; when false
	 *                   the caller must commitSync()/commitAsync() itself
	 * @return a new KafkaConsumer; the caller owns it and must close() it
	 */
	public static KafkaConsumer<String, String> getConsumer(String brokers, String groupid, boolean autoCommit) {
		logger.info("get kafka consumer>>>");
		Properties props = new Properties();
		props.put("bootstrap.servers", brokers);
		props.put("group.id", groupid);
		props.put("enable.auto.commit", autoCommit);
		props.put("auto.commit.interval.ms", CustomConfiguration.getString("kafka.auto.commit.interval.ms", Constant.KAFKA_AUTO_COMMIT_INTERVAL_MS));
		props.put("session.timeout.ms", CustomConfiguration.getString("kafka.session.timeout.ms", Constant.KAFKA_SESSION_TIMEOUT_MS));
		props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		props.put("auto.offset.reset", CustomConfiguration.getString("kafka.auto.offset.reset", "latest"));
		props.put("max.poll.records", CustomConfiguration.getInt("kafka.max.poll.records", 500));
//		if (CommonConstant.ENV_TEST.equals(CustomConfiguration.getString(CommonConstant.ENV_KEY, CommonConstant.ENV_LIVE))) {
//			props.put("security.protocol", "SASL_PLAINTEXT");
//			props.put("sasl.kerberos.service.name", "kafka");
//			props.put("sasl.kerberos.min.time.before.relogin", CustomConfiguration.getString("sasl.kerberos.min.time.relogin", "300000"));
//			logger.info("======> Common-Hadoop-Consumer: kafka初始化添加kerberos校验权限 <=======");
//		}
		logger.info("kafka consumer config:" + props.toString());
		return new KafkaConsumer<>(props);
	}

	/**
	 * Manual smoke test against the dev cluster: drains one poll from
	 * onlinead_billing_dev, then publishes 10 test messages to it.
	 */
	public static void main(String[] args) {
		CustomConfiguration.setString(CommonConstant.ENV_KEY, "test");
		String brokers = "10.201.7.187:9093, 10.201.7.188:9093, 10.201.7.189:9093, 10.201.7.190:9093";

		KafkaConsumer<String, String> consumer = getConsumer(brokers, "test123456", false);
		try {
			consumer.subscribe(Arrays.asList("onlinead_billing_dev"));
			ConsumerRecords<String, String> records = consumer.poll(1000);
			for (ConsumerRecord<String, String> record : records) {
				System.out.println("key=" + record.key() + ", value=" + record.value() + "\n");
			}
			// BUG fix: commitSync() used to run once per record inside the loop;
			// a single commit after the batch is equivalent and far cheaper.
			if (!records.isEmpty()) {
				consumer.commitSync();
			}
		} finally {
			// BUG fix: consumer was never closed (socket/group leak).
			consumer.close();
		}

		KafkaProducer<String, String> producer = getProducer(brokers);
		try {
			for (int i = 1; i <= 10; i++) {
				String value = "value_" + i;
				producer.send(new ProducerRecord<String, String>("onlinead_billing_dev", "zzzzzz", value));
				System.out.println(i);
			}
		} finally {
			// BUG fix: send() is asynchronous; without close() (which flushes the
			// buffer) the test messages could be dropped when the JVM exits.
			producer.close();
		}
	}

}
