package com.yjs.app.trade.service.service.util;


import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.yjs.app.trade.core.util.PropertiesUtils;

import net.sf.json.JSONObject;

@Component
public class KafkaService {

	@Autowired
	UtilService utilService;

	// One long-lived consumer per topic. ConcurrentHashMap so kafkaConsumer()/destroy()
	// may be called from different threads without corrupting the map.
	private static Map<String, KafkaConsumer<String, String>> consumers = new ConcurrentHashMap<>();

	// Global stop flag: setting it to true makes every polling loop exit.
	// NOTE(review): it is shared by ALL topics, so destroy(topicA) also stops topicB's
	// loop — confirm whether a per-topic flag is wanted before relying on multi-topic use.
	public static boolean close = false;

	// Was LoggerFactory.getLogger(Logger.class), which attributed every log line
	// to org.slf4j.Logger instead of this class.
	private static final Logger log = LoggerFactory.getLogger(KafkaService.class);

	/**
	 * Starts a blocking poll loop over partition 0 of {@code topic} and forwards each
	 * element of the "body" JSON array of every received message to
	 * {@link UtilService#reviceMessages}. Returns immediately if a consumer for this
	 * topic is already registered. The loop runs until {@link #close} is set
	 * (see {@link #destroy(String)}).
	 *
	 * @param bootstrapServers Kafka broker list, e.g. "host1:9092,host2:9092"
	 * @param groupId          consumer group id
	 * @param topic            topic to consume; only partition 0 is read —
	 *                         NOTE(review): assumes single-partition topics, confirm
	 */
	@SuppressWarnings("unchecked")
	public void kafkaConsumer(String bootstrapServers, String groupId, String topic) {
		if (consumers.get(topic) != null) {
			log.info("该队列已启动!");
			return;
		}
		// Method-local config: the previous shared static Properties was also mutated by
		// kafkaProducer (which removed "group.id"), so concurrent use corrupted this config.
		Properties props = new Properties();
		props.put("bootstrap.servers", bootstrapServers);
		props.put("group.id", groupId);
		props.put("enable.auto.commit", PropertiesUtils.getProperty("kafka.enable.auto.commit").toString());
		props.put("auto.commit.interval.ms", PropertiesUtils.getProperty("kafka.auto.commit.interval.ms").toString());
		props.put("session.timeout.ms", PropertiesUtils.getProperty("kafka.session.timeout.ms").toString());
		props.put("auto.offset.reset", PropertiesUtils.getProperty("kafka.auto.offset.reset").toString());
		props.put("key.deserializer", PropertiesUtils.getProperty("kafka.key.deserializer").toString());
		props.put("value.deserializer", PropertiesUtils.getProperty("kafka.value.deserializer").toString());
		KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
		consumer.assign(Arrays.asList(new TopicPartition(topic, 0)));
		consumers.put(topic, consumer);
		// Reset the stop flag: previously it stayed true after the first destroy(),
		// so every later kafkaConsumer() call exited its loop immediately.
		close = false;
		while (!close) {
			try {
				ConsumerRecords<String, String> records = consumer.poll(30000L);
				if (records == null || records.isEmpty()) {
					continue;
				}
				List<JSONObject> msgs = new ArrayList<JSONObject>();
				for (ConsumerRecord<String, String> record : records) {
					List<JSONObject> body;
					try {
						body = JSONObject.fromObject(record.value()).getJSONArray("body");
					} catch (Exception e) {
						// Malformed payload: log with full stack trace and skip this record.
						log.error("failed to parse message payload, skipping: {}", record.value(), e);
						continue;
					} finally {
						// Commit even when parsing fails so a poison message is not re-read forever.
						consumer.commitSync();
					}
					msgs.addAll(body);
				}
				if (!msgs.isEmpty()) {
					utilService.reviceMessages(msgs);
				}
			} catch (Exception e) {
				// Was printStackTrace() + log.error(e.getMessage()) — message alone loses the stack trace.
				log.error("kafka consume loop error, topic=" + topic, e);
			}
		}
	}

	/**
	 * Sends a single message to {@code topic} using a short-lived producer.
	 *
	 * @param bootstrapServers  Kafka broker list. Previously this parameter was ignored and the
	 *                          producer reused the consumer's shared static config, so it only
	 *                          worked if kafkaConsumer() had been called first.
	 * @param keyDeserializer   unused — kept only for signature compatibility with existing callers
	 * @param valueDeserializer unused — kept only for signature compatibility with existing callers
	 * @param topic             destination topic
	 * @param message           message payload
	 */
	public static void kafkaProducer(String bootstrapServers, String keyDeserializer, String valueDeserializer, String topic, String message) {
		Properties props = new Properties();
		props.put("bootstrap.servers", bootstrapServers);
		props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		// try-with-resources guarantees the producer is closed (flushing buffered sends)
		// even if send() throws.
		try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
			producer.send(new ProducerRecord<String, String>(topic, message));
		}
	}

	/**
	 * Stops the polling loop and closes the consumer registered for {@code topic}.
	 * NOTE(review): {@link #close} is global, so this also stops the loops of all
	 * other topics — confirm that is acceptable.
	 */
	public static void destroy(String topic) {
		KafkaConsumer<String, String> consumer = consumers.remove(topic);
		if (consumer != null) {
			close = true;
			consumer.close();
		}
	}

	/** Manual smoke test: consumes "order-trans-exchange" and prints every record. */
	@SuppressWarnings("resource")
	public static void main(String[] args) {
		Properties props = new Properties();
		props.put("bootstrap.servers", "192.168.13.30:9092,192.168.13.31:9092");
		props.put("group.id", "order-sync");
		props.put("enable.auto.commit", "false");
		props.put("auto.commit.interval.ms", "1000");
		props.put("session.timeout.ms", "30000");
		props.put("auto.offset.reset", "earliest");
		props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
		String topic = "order-trans-exchange";
		consumer.subscribe(Arrays.asList(topic));
		while (true) {
			try {
				ConsumerRecords<String, String> records = consumer.poll(30000L);
				if (records == null || records.isEmpty()) {
					continue;
				}
				int count = 0;
				long lastOffset = 0L;
				List<String> msgs = new ArrayList<String>();
				for (ConsumerRecord<String, String> record : records) {
					count++;
					lastOffset = record.offset(); // was never assigned — always printed 0
					String value = record.value();
					System.out.println(value);
					msgs.add(value); // was msgs.add(msg) with msg always null
				}
				System.out.println(count);
				System.out.println(lastOffset);
				// was JSONObject.fromObject(msgs) — fromObject on a List throws in net.sf.json
				System.out.println(msgs);
				consumer.commitSync();
			} catch (Exception e) {
				e.printStackTrace(); // smoke test: full trace is more useful than getMessage()
			}
		}
	}
}
