package com.etlmaster.executor.realtime.kafka;

import com.etlmaster.executor.utils.LogWriter;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Future;

import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.clients.producer.internals.FutureRecordMetadata;


/**
 * Helper class for Kafka operations: lazily creates a shared producer /
 * consumer from a classpath properties file, and provides batch sending
 * with a single retry pass over records whose send failed.
 *
 * NOTE(review): the lazy initialisation below is not synchronized — confirm
 * this class is only used from a single thread.
 *
 * @author Larry
 * @copyright Larry@etlMaster
 * @since 2017-09-10
 */
public class KafkaHelper {
	// Shared singleton producer, created on first use by getProducer().
	private static Producer<String, String> producer;
	// Shared singleton consumer, created on first use by getConsumer().
	private static KafkaConsumer<String, String> consumer;
	// Connection properties loaded once from the KafkaServer resource.
	private static Properties props;

	// Classpath location of the Kafka connection properties file.
	private static String KafkaServer = "conf/kafka/kafkaServer.properties";

	/**
	 * Returns the shared producer, creating it on first call.
	 *
	 * @return the singleton Kafka producer
	 * @throws IOException if the properties file cannot be read
	 */
	public static Producer<String, String> getProducer() throws UnsupportedEncodingException,
			IOException {
		if (producer == null)
			initProducer();

		return producer;
	}

	/** Creates the shared producer from the loaded connection properties. */
	private static void initProducer() throws UnsupportedEncodingException,
			IOException {
		initProperties();
		producer = new KafkaProducer<String, String>(props);
	}

	/**
	 * Loads the KafkaServer properties file from the classpath into
	 * {@link #props}; no-op when already loaded. Logs an error (but does
	 * not throw) when the resource is missing.
	 */
	private static void initProperties() throws UnsupportedEncodingException,
			IOException {
		if (props == null) {
			props = new Properties();
			LogWriter.addLog("DEBUG",KafkaServer);
			InputStream in = KafkaHelper.class.getClassLoader()
					.getResourceAsStream(KafkaServer);
			if (in != null) {
				try {
					props.load(new InputStreamReader(in, "utf-8"));
				} finally {
					// FIX: Properties.load does not close the stream; it was
					// previously leaked.
					in.close();
				}
			} else {
				LogWriter.addLog("ERROR","未找到KafkaServer 文件！");
			}
		}
	}

	/**
	 * Returns the shared consumer, creating it and subscribing it to
	 * {@code topic} on first call.
	 *
	 * NOTE(review): once initialised, later calls ignore {@code topic} —
	 * the consumer stays subscribed to the first topic requested. Verify
	 * callers never expect a different subscription.
	 *
	 * @param topic topic to subscribe to on first initialisation
	 * @throws IOException if the properties file cannot be read
	 */
	public static KafkaConsumer<String, String> getConsumer(String topic)
			throws UnsupportedEncodingException, IOException {
		if (consumer == null)
			initConsumer(topic);

		return consumer;
	}

	/** Creates the shared consumer and subscribes it to {@code topic}. */
	private static void initConsumer(String topic)
			throws UnsupportedEncodingException, IOException {
		initProperties();
		consumer = new KafkaConsumer<String, String>(props);
		consumer.subscribe(Arrays.asList(topic));
	}

	/**
	 * Sends {@code data} to {@code topic}, then re-sends the records whose
	 * delivery failed. {@code i} is the retry depth: callers pass 0, and
	 * the {@code i == 0} guard limits the retry to exactly one extra pass.
	 *
	 * @param topic     destination topic
	 * @param partition target partition, or negative to let the partitioner choose
	 * @param i         retry depth; pass 0 from external callers
	 * @param producer  producer to send with
	 * @param data      payloads to send; cleared by {@link #checkResult}
	 */
	public static void sendAndCheckResult(String topic, int partition, int i,
			Producer producer, List<String> data) {
		List<Future> responses = sendMessage(topic, partition, producer, data);
		List<String> errorList = checkResult(responses, data);
		if ((errorList.size() != 0) && (i == 0)) {
			LogWriter.addLog(new String[] { "WARN", "有部分数据发送失败，重新发送" });
			sendAndCheckResult(topic, partition, i + 1, producer, errorList);
		}
	}

	/**
	 * Blocks on every send future and collects the payloads whose send
	 * completed exceptionally.
	 *
	 * @param responses futures returned by {@link #sendMessage}
	 * @param data      payloads, index-aligned with {@code responses};
	 *                  cleared before this method returns
	 * @return the payloads whose send failed (empty when all succeeded)
	 */
	public static List<String> checkResult(List<Future> responses,
			List<String> data) {
		LogWriter.addLog(new String[] { "INFO", "检查发送结果" });
		List<String> errorList = new ArrayList<String>();
		for (int i = 0; i < responses.size(); i++) {
			try {
				// FIX: use the public Future/RecordMetadata API instead of
				// casting to the internal FutureRecordMetadata class, which
				// is not part of Kafka's supported client surface.
				Future<?> future = responses.get(i);
				RecordMetadata meta = (RecordMetadata) future.get();
				// Only the first and last results are logged to avoid
				// flooding the log; get() above still runs for every future
				// so failures are always detected.
				if ((i != 0) && (i != responses.size() - 1))
					continue;
				LogWriter.addLog(new String[] { "INFO",
						"offset:{} partition:{}",""+meta.offset(),meta.partition()+"" });
			} catch (Exception e) {
				errorList.add(data.get(i));
				LogWriter
						.addLog(new String[] { "ERROR", "异常：" + e.getMessage() });
			}

		}

		data.clear();
		LogWriter.addLog(new String[] { "WARN",
				"信息发送异常的记录数为：" + errorList.size() });
		return errorList;
	}

	/**
	 * Sends every entry of {@code data} to {@code topic}, keyed by the
	 * current timestamp, flushing every 10000 records and once at the end.
	 *
	 * @param topic     destination topic
	 * @param partition target partition, or negative to let the partitioner choose
	 * @param producer  producer to send with
	 * @param data      payloads to send
	 * @return one future per record, index-aligned with {@code data}
	 */
	@SuppressWarnings("unchecked")
	public static List<Future> sendMessage(String topic, int partition,
			Producer producer, List<String> data) {
		LogWriter.addLog(new String[] { "INFO", "开始,数据长度为：" + data.size() });
		List<Future> responses = new ArrayList<Future>();
		for (int i = 0; i < data.size(); ++i) {
			// Periodic flush keeps the in-flight batch from growing unbounded.
			if (i % 10000 == 0)
				producer.flush();
			ProducerRecord<String, String> record;
			if (partition >= 0)
				record = new ProducerRecord<String, String>(topic, partition,
						String.valueOf(System.currentTimeMillis()),
						data.get(i));
			else
				record = new ProducerRecord<String, String>(topic,
						String.valueOf(System.currentTimeMillis()), data.get(i));

			responses.add(producer.send(record));
		}
		producer.flush();
		LogWriter.addLog(new String[] { "INFO", "发送完成" });
		return responses;
	}
}