package dyyx.util;

import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Random;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringDeserializer;

/**
 * Kafka load/availability test helper.
 *
 * <p>On class load it reads its configuration via {@code ConfigUtil}, creates a shared
 * {@link Producer}, and starts {@code consumerCount} background threads that each own a
 * {@link KafkaConsumer} subscribed to all test topics
 * ({@code allinone_test_topic_0 .. allinone_test_topic_(topicCount-1)}).
 *
 * <p>Thread-safety: the producer is safe to share across threads (per the Kafka client
 * contract); each consumer instance is confined to its own thread. {@code producer} and
 * {@code consumerPause} are {@code volatile} so writes are visible across threads.
 */
public abstract class KafkaUtil {

	/** Number of test topics; valid topic indices are 0 .. topicCount-1. */
	private static final int topicCount;
	/** Number of background consumer threads started by {@link #init()}. */
	private static final int consumerCount;
	/** Kafka bootstrap.servers value (trimmed). */
	private static final String servers;
	/** Producer acks setting (trimmed), e.g. "0", "1" or "all". */
	private static final String acks;

	/** Common prefix for every generated test topic name. */
	private static final String TOPIC_PREFIX = "allinone_test_topic_";

	/** Used to pick a random topic index when the caller passes an out-of-range one. */
	private static final Random RAND = new Random();

	/** Shared producer; assigned once in init() and published via volatile write. */
	private static volatile Producer<String, String> producer;

	/** When true, consumer threads sleep instead of polling (runtime pause switch). */
	private static volatile boolean consumerPause = false;

	static {
		// Read raw configuration first; validate before assigning the final fields
		// so a bad config fails the whole class load with a clear message.
		int topicCountTmp = ConfigUtil.getInt("kafka.topic.count");
		int consumerCountTmp = ConfigUtil.getInt("kafka.consumer.count");
		String serversTmp = ConfigUtil.get("kafka.servers");
		String acksTmp = ConfigUtil.get("kafka.acks");

		// Sanity bounds: this is a test utility, so cap topic/consumer counts.
		if(topicCountTmp<=0 || topicCountTmp>=100){
			throw new RuntimeException("kafka topicCount error,"+topicCountTmp);
		}

		if(consumerCountTmp<=0 || consumerCountTmp>=10){
			throw new RuntimeException("kafka consumerCount error,"+consumerCountTmp);
		}

		if(CommUtil.isBlank(serversTmp)){
			throw new RuntimeException("kafka servers blank");
		}

		if(CommUtil.isBlank(acksTmp)){
			throw new RuntimeException("kafka acks blank");
		}

		serversTmp = serversTmp.trim();
		acksTmp = acksTmp.trim();

		topicCount = topicCountTmp;
		consumerCount = consumerCountTmp;
		servers = serversTmp;
		acks = acksTmp;

		try{
			init();
		}catch(Throwable e){
			// Preserve the cause so a connection/config failure is diagnosable.
			throw new RuntimeException("kafka init Error",e);
		}

	}

	/**
	 * Returns a snapshot of the effective configuration for status reporting.
	 *
	 * @return a new mutable map with keys topicCount, consumerCount, servers, acks
	 */
	public static Map<String,Object> getConfigMap(){
		Map<String,Object>  map = new HashMap<>();
		map.put("topicCount", topicCount);
		map.put("consumerCount", consumerCount);
		map.put("servers", servers);
		map.put("acks", acks);
		return map;
	}

	/**
	 * Resolves a topic index to a topic name.
	 *
	 * @param index desired topic index; if out of range a random valid index is used
	 * @return topic name of the form {@code allinone_test_topic_<index>}
	 */
	public static String getTopic(int index){
		// BUG FIX: valid indices are 0 .. topicCount-1 (the consumer threads only
		// subscribe to those suffixes). The previous check (index > topicCount)
		// let index == topicCount through, producing a topic nobody consumes.
		if(index<0 || index>=topicCount){
			index = RAND.nextInt(topicCount);
		}
		return TOPIC_PREFIX+index;
	}

	/**
	 * Sends a message synchronously and returns the broker acknowledgement.
	 *
	 * @param topicIndex topic index; out-of-range values pick a random topic
	 * @param key        record key; if blank the record is sent without a key
	 * @param value      record value; if blank nothing is sent
	 * @return the {@link RecordMetadata} of the send, or {@code null} when value is blank
	 * @throws Exception if the send fails or is interrupted (from Future.get())
	 */
	public static RecordMetadata sendMsg(int topicIndex,String key,String value)throws Exception{
		if(CommUtil.isBlank(value)){
			return null;
		}

		String topic = getTopic(topicIndex);

		ProducerRecord<String, String> record = null;

		if(CommUtil.isBlank(key)){
			record = new ProducerRecord<String, String>(topic, value);
		}else{
			record = new ProducerRecord<String, String>(topic, key, value);
		}

		// Block until the broker acknowledges (per the configured acks level).
		return producer.send(record).get();

	}

	/**
	 * Creates the shared producer and starts the consumer threads, recording the
	 * setup timings via RunStatusUtil. Called exactly once from the static
	 * initializer; synchronized as a belt-and-braces guard.
	 */
	private static synchronized void init()throws Exception{

		StringBuilder sb = new StringBuilder();

		Properties props = new Properties();
		props.put("bootstrap.servers", servers);
		props.put("acks", acks);
		props.put("retries", 0);
		props.put("batch.size", 16384);
		props.put("linger.ms", 1);
		props.put("buffer.memory", 33554432);
		props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

		long start = System.currentTimeMillis();
		Producer<String, String> producerTmp = new KafkaProducer<String, String>(props);
		long end = System.currentTimeMillis();
		long time = end - start;

		sb.append("producer create time="+time);

		// Volatile publish: sendMsg() may be called from other threads after this.
		producer = producerTmp;

		// Construct all consumer threads first, then start them, so a constructor
		// failure (e.g. bad config) aborts init before any thread runs.
		start = System.currentTimeMillis();
		ConsumerThread[] ts = new ConsumerThread[consumerCount];
		for(int i=0;i<consumerCount;i++){
			ConsumerThread t = new ConsumerThread();
			t.setName("ConsumerThread-"+i);
			ts[i] = t;
		}
		for(int i=0;i<consumerCount;i++){
			ts[i].start();
		}
		end = System.currentTimeMillis();
		time = end - start;

		sb.append(",Consumer create time="+time+",consumerCount="+consumerCount);

		RunStatusUtil.updateStaticInfo("kafkaInitInfo", sb.toString());

	}

	/**
	 * Background consumer loop. Each instance owns its own KafkaConsumer
	 * (KafkaConsumer is not thread-safe, so it is confined to this thread) and
	 * polls all test topics forever, logging counts via RunStatusUtil.
	 */
	private static class ConsumerThread extends Thread{

		private final KafkaConsumer<String, String> consumer;

		public ConsumerThread(){

			Properties props = new Properties();

			props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
			props.put(ConsumerConfig.GROUP_ID_CONFIG, "allinone_test_group");
			props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
			props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
			props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
			props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

			// Subscribe to every test topic (suffixes 0 .. topicCount-1).
			List<String> topics = new ArrayList<>();
			for(int i=0;i<topicCount;i++){
				topics.add(TOPIC_PREFIX+i);
			}
			KafkaConsumer<String, String> consumerTmp = new KafkaConsumer<String, String>(props);
			consumerTmp.subscribe(topics);
			consumer = consumerTmp;

		}

		@Override
		public void run(){

			while(true){

				try{

				// Runtime pause switch: idle without polling when requested.
				if(consumerPause){
					CommUtil.doSleep(500);
					RunStatusUtil.updateCount("consumerPause");
					RunStatusUtil.logInfo("consumerPause", LocalDateTime.now().toString());
					continue;
				}

				long start = System.currentTimeMillis();
				// poll(long) is deprecated in newer clients in favor of
				// poll(Duration); kept for compatibility with the client in use.
				ConsumerRecords<String, String> records = consumer.poll(100);
				long end = System.currentTimeMillis();
				long time = end - start;
                int num = -1;
                if(records!=null){
                	num = records.count();
                }

                String info = "num="+num+",time="+time+","+LocalDateTime.now()+","+consumer;
				RunStatusUtil.logInfo("consumerInfo", info);

				if(records==null || records.isEmpty()){
					RunStatusUtil.updateCount("consumerRecordsEmpty");
					continue;
				}
				if(num>0){
				    RunStatusUtil.updateCount("consumerRecords",num);
				}

				for (ConsumerRecord<String, String> record : records) {
					System.out.println(record);
					// Thread.sleep(1000L);
				}

				}catch(Throwable e){
					// Deliberate best-effort loop: log, back off, keep consuming.
					RunStatusUtil.logError("consumerError", e);
					CommUtil.doSleep(1000);
				}

			}

		}
	}

}