package com.k2data.client;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ThreadLocalRandom;

import org.I0Itec.zkclient.ZkClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.k2data.Utils.AssetUtils;
import com.k2data.Utils.Constants;
import com.k2data.Utils.EnvConf;
import com.k2data.Utils.K2ComposeConstants;
import com.k2data.Utils.MysqlService;
import com.k2data.Utils.Utils;
import com.k2data.platform.ddm.sdk.builder.KMXRecord;
import com.k2data.platform.ddm.sdk.builder.KMXRecordBuilder;
import com.k2data.platform.ddm.sdk.client.KMXClient;
import com.k2data.platform.ddm.sdk.client.KMXConfig;
import com.k2data.platform.ddm.sdk.common.DataType;

import kafka.admin.AdminUtils;
import kafka.api.TopicMetadata;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;
import kafka.utils.ZKStringSerializer$;

/**
 * Created by lzx on 2017-8-18.
 */
public class ShaanGuRealTimeDataConsumer {

	// Class-wide SLF4J logger.
	private static Logger logger = LoggerFactory.getLogger(ShaanGuRealTimeDataConsumer.class);
	// SDM database connection; declared but not used by instance methods in this file.
	private Connection conn = null;
	// Old-style (high-level) Kafka consumer created in the parameterized constructor.
	private ConsumerConnector consumer;
	// KMX DDM SDK client used to push records to the KMX platform.
	private KMXClient kmxClient;
	// KMX platform Kafka broker list (host:9092[,host:9092...]).
	private String kmxKafkaUrl;
	// SDM metadata REST endpoint used for asset auto-registration.
	private String sdmApiUrl;
	// ShaanGu raw-data Kafka broker list.
	private String sgKafkaUrl;
	// NOTE(review): never assigned or read in this file — candidate for removal.
	private List<String> assets;
	// Zookeeper connect string of the raw-data Kafka cluster.
	private String zkURL;
	// Topic that receives records rejected during processing.
	private String sgExDataTopic;
	// Producer for the exception topic; static and shared by all consumer threads.
	public static KafkaClient clientForEx;

	/** No-arg constructor; leaves all fields unset. Use the parameterized constructor for real work. */
	public ShaanGuRealTimeDataConsumer() {
	}

	/**
	 * Builds a consumer attached to the ShaanGu raw-data Kafka cluster and
	 * initializes the downstream KMX clients. Exits the JVM if initialization
	 * fails.
	 *
	 * @param zkURL         zookeeper connect string of the raw-data Kafka cluster
	 * @param sdmApiUrl     SDM metadata REST endpoint (assets API)
	 * @param kmxKafkaUrl   KMX platform Kafka broker list
	 * @param sgKafkaUrl    ShaanGu Kafka broker list
	 * @param sgExDataTopic topic that receives records rejected during processing
	 */
	public ShaanGuRealTimeDataConsumer(String zkURL, String sdmApiUrl, String kmxKafkaUrl, String sgKafkaUrl,
			String sgExDataTopic) {
		this.sdmApiUrl = sdmApiUrl;
		this.kmxKafkaUrl = kmxKafkaUrl;
		this.zkURL = zkURL;
		this.sgKafkaUrl = sgKafkaUrl;
		this.sgExDataTopic = sgExDataTopic;
		// create consumer
		Properties props = new Properties();
		// zookeeper conf
		props.put("zookeeper.connect", zkURL);
		// group
		props.put("group.id", "sgk2-consumer-group");
		// zk connection timeout
		props.put("zookeeper.session.timeout.ms", "4000");
		props.put("zookeeper.sync.time.ms", "200");
		props.put("auto.commit.interval.ms", "2000");
		// start from the earliest offset when no committed offset exists
		props.put("auto.offset.reset", "smallest");
		// offsets are committed manually in consume() after each message
		props.put("enable.auto.commit", "false");
		// serializer data
		props.put("serializer.class", "kafka.serializer.StringEncoder");
		ConsumerConfig config = new ConsumerConfig(props);
		consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config);

		try {
			init();
		} catch (Exception e) {
			// Log the failure with its cause before exiting; the original
			// swallowed the exception silently, making startup failures
			// impossible to diagnose.
			logger.error("init failed, shutting down consumer", e);
			if (clientForEx != null) {
				clientForEx.close();
			}
			if (kmxClient != null) {
				kmxClient.close();
			}
			System.exit(1);
		}
	}

	/**
	 * Initializes the exception-topic Kafka producer ({@link #clientForEx})
	 * and the KMX SDK client ({@code kmxClient}).
	 *
	 * @throws Exception if either client cannot be created; the caller is
	 *                   expected to clean up and terminate on failure
	 */
	public void init() throws Exception {
		logger.info("init...");
		if (kmxKafkaUrl == null || kmxKafkaUrl.trim().isEmpty()) {
			logger.error("input params is not valid:" + kmxKafkaUrl);
			System.exit(1);
		}
		// init client for exception producer
		try {
			clientForEx = new KafkaClient(sgExDataTopic, kmxKafkaUrl);
		} catch (ExecutionException e) {
			// Log through SLF4J with the cause attached instead of printStackTrace().
			logger.error("kafka producer can't send message to server: " + kmxKafkaUrl, e);
			throw e;
		} catch (InterruptedException e) {
			// Restore the interrupt flag before propagating so callers can
			// still observe the interruption.
			Thread.currentThread().interrupt();
			logger.error("interrupted while creating kafka producer for " + kmxKafkaUrl, e);
			throw e;
		}
		logger.info("start kafka producer success.");
		// init ddm sdk kafka client
		try {
			this.kmxClient = createKmxClient(kmxKafkaUrl);
		} catch (Exception e) {
			logger.error("create KmxClient failed:" + e.getMessage(), e);
			throw e;
		}
	}

	/**
	 * Consumes raw S8000 records from the given topic and forwards them to
	 * KMX, diverting undeliverable records to the exception topic. This
	 * method blocks for as long as the stream delivers messages.
	 *
	 * @param sgRawDataTopicName topic carrying the raw data
	 * @param publishedAssetsId  cache of asset ids already registered in KMX;
	 *                           shared (and mutated) across consumer threads
	 * @throws ExecutionException   if writing to the exception topic fails
	 * @throws InterruptedException if writing to the exception topic is interrupted
	 */
	public void consume(String sgRawDataTopicName, Map<String, String> publishedAssetsId)
			throws ExecutionException, InterruptedException {
		// running counters, periodically flushed to the log
		long count = 0;
		long total = 0;
		long success = 0;
		long fail = 0;

		Map<String, Long> res = new HashMap<String, Long>();
		Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
		// Integer.valueOf instead of the deprecated `new Integer(1)`.
		topicCountMap.put(sgRawDataTopicName, Integer.valueOf(1));
		// decoder string
		StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
		StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());
		// get consumer map
		Map<String, List<KafkaStream<String, String>>> consumerMap = consumer.createMessageStreams(topicCountMap,
				keyDecoder, valueDecoder);
		KafkaStream<String, String> stream = consumerMap.get(sgRawDataTopicName).get(0);
		ConsumerIterator<String, String> it = stream.iterator();
		// get message
		while (it.hasNext()) {
			MessageAndMetadata<String, String> message = it.next();
			count++;
			if (count == 1000) {
				// heartbeat log every 1000 raw messages
				logger.info("timestamp:" + System.currentTimeMillis() + " consume: " + count + " raw data");
				count = 0;
			}
			String msg = null;
			try {
				msg = message.message();
				logger.debug("received message is: " + msg);
				res = sendDataToKmx(kmxClient, msg, sdmApiUrl, DataType.JSON_STRING, 10, 500l, publishedAssetsId);
			} catch (Exception e) {
				// Divert the raw payload to the exception topic and move on;
				// the offset for this message is committed with the next
				// successful one.
				logger.error("received msg is failed. Write data to exception area. " + e.getMessage(), e);
				clientForEx.sendMessageSync(Constants.S8KJSONEXDATA_KEY + "_" + getRandomNum(), msg);
				continue;
			}
			if (res != null) {
				// The map values are already Longs; unbox directly instead of
				// the former Long.parseLong(res.get(...) + "") string round-trip.
				total += res.get("all");
				fail += res.get("fail");
				success += res.get("success");
				if (fail > 0 || total > 10000) {
					logger.info("timestamp " + System.currentTimeMillis() + ", data sent to kmx, total: " + total
							+ ", success: " + success + ", fail :" + fail);
					total = 0;
					fail = 0;
					success = 0;
				}
			}
			consumer.commitOffsets(true);
		}
	}

	/**
	 * Produces a random five-digit number (10000-99999) as a string, used to
	 * salt keys written to the exception topic.
	 */
	private String getRandomNum() {
		final int salt = ThreadLocalRandom.current().nextInt(10000, 100000);
		return Integer.toString(salt);
	}

	/**
	 * Builds a {@link KMXClient} pointed at the given KMX Kafka broker list,
	 * sending JSON-string records with a 60-second send timeout.
	 *
	 * @param kmxKfkaUrl
	 *            KMX Kafka broker list (host:9092[,host:9092...])
	 * @return a configured KMXClient
	 */
	private KMXClient createKmxClient(String kmxKfkaUrl) {
		final KMXConfig cfg = new KMXConfig();
		cfg.put(com.k2data.platform.ddm.sdk.common.ParamNames.PLATFORM_SERVER, kmxKfkaUrl);
		cfg.put(com.k2data.platform.ddm.sdk.common.ParamNames.DATA_TYPE, DataType.JSON_STRING);
		cfg.put(com.k2data.platform.ddm.sdk.common.ParamNames.SEND_TIMEOUT_MILLISECS, 60000);
		// NOTE(review): return value discarded — presumably a no-op getter,
		// kept only to preserve the original behavior; confirm and remove.
		cfg.getConfig();
		return new KMXClient(cfg);
	}

	/**
	 * Parses one raw S8000 payload (a JSON array of data points) and sends
	 * each record to KMX, auto-registering unknown assets and diverting
	 * failed records to the exception topic.
	 *
	 * @param client            KMX SDK client used to send records
	 * @param s8kData           raw payload; expected to be a JSON array
	 * @param kmxAssetUrl       SDM assets REST endpoint for auto-registration
	 * @param dataType          record encoding (passed in but unused here)
	 * @param checkTimes        how many times to poll for asset registration
	 * @param checkInterval     registration poll interval, in milliseconds
	 * @param publishedAssetsId cache of already-registered asset ids (mutated)
	 * @return counters keyed "all"/"success"/"fail", or {@code null} when the
	 *         payload is null, empty, or unparseable
	 * @throws ExecutionException   if writing to the exception topic fails
	 * @throws InterruptedException if writing to the exception topic is interrupted
	 */
	private Map<String, Long> sendDataToKmx(KMXClient client, String s8kData, String kmxAssetUrl, DataType dataType,
			int checkTimes, long checkInterval, Map<String, String> publishedAssetsId)
			throws ExecutionException, InterruptedException {
		Map<String, Long> result = new HashMap<String, Long>();
		JSONArray array = new JSONArray();
		KMXRecordBuilder builder = KMXRecordBuilder.getBuilder();
		long all = 0;
		long success = 0;
		long fail = 0;
		// 动态数据 (dynamic data)
		if (s8kData == null) {
			logger.info("s8000 data is null");
			return null;
		}
		try {
			array = JSONArray.parseArray(s8kData);
			// The null/empty check must run BEFORE size(): parseArray returns
			// null for the input "null", and the original called size() first,
			// triggering an NPE instead of the intended graceful return.
			if (array == null || array.isEmpty()) {
				logger.info("s8000 data is empty");
				return null;
			}
			all = array.size();
			logger.debug("raw data size is: " + all);
		} catch (JSONException jsonEx) {
			logger.error("s8000 data can not be parsed to JSONArray,write data to exception area ", jsonEx);
			logger.debug("s8000 data is " + s8kData);
			clientForEx.sendMessageSync(Constants.S8KJSONEXDATA_KEY + "_" + getRandomNum(), s8kData);
			return null;
		}
		KMXRecord r = null;
		for (int i = 0; i < array.size(); i++) {
			JSONObject asset = new JSONObject();
			// may be null when the element is not a JSON object
			JSONObject data = array.getJSONObject(i);
			try {
				long ts = data.getJSONObject("sampleTime").getLongValue("timestamp");
				JSONArray fields = data.getJSONArray("fields");
				// LinkedHashMap: field order is part of the compound asset id.
				Map<String, String> compoundId = new LinkedHashMap<>();
				String assetId = null;
				builder.setTimestamp(ts);
				String fieldGroupId = null;
				for (int j = 0; j < fields.size(); j++) {
					JSONObject field = fields.getJSONObject(j);
					String fieldKey = field.getString("fieldId");
					boolean isIdField = Utils.isIdField(fieldKey);
					Object fieldValue = field.get("fieldValue");
					builder.setField(fieldKey, fieldValue, isIdField);
					if (isIdField) {
						// null-safe, and drops the redundant `.toString() + ""`
						compoundId.put(fieldKey, String.valueOf(fieldValue));
					}
					if ("channel_type".equalsIgnoreCase(fieldKey)) {
						// channel_type selects the KMX field group for this record
						fieldGroupId = Utils.getFgId(Integer.parseInt(field.getString("fieldValue")));
						builder.setFieldGroupId(fieldGroupId);
					}
				}
				asset.put("fieldGroupId", fieldGroupId);
				asset.put("compoundId", compoundId);
				assetId = AssetUtils.sysid(compoundId);
				r = builder.getRecord();
				if (!publishedAssetsId.containsKey(assetId)) {
					if (AssetUtils.autoRegisterAsset(kmxAssetUrl, assetId, asset, checkTimes, checkInterval)) {
						publishedAssetsId.put(assetId, "0");
						client.send(r);
						success++;
					} else {
						// Record is still sent even when registration failed,
						// but counted as a failure (preserved behavior).
						logger.error(
								"can not register asset for this data,send data to kmx,write data to kmx exception area");
						client.send(r);
						fail++;
					}
				} else {
					client.send(r);
					success++;
				}

			} catch (Exception e) {
				logger.error("Send msg to KMX got exception: " + e.getMessage(), e);
				// `data` can be null (non-object array element); the original
				// data.toString() threw a secondary, uncaught NPE here. The
				// former guard also tested the wrong variable (`r`).
				String payload = String.valueOf(data);
				logger.error(payload);
				clientForEx.sendMessageSync(Constants.SENDDATATOKMXEX_KEY + getRandomNum(), payload);
				fail++;
			}
		}

		result.put("all", all);
		result.put("fail", fail);
		result.put("success", success);
		return result;
	}

	/**
	 * Entry point: reads all configuration from the environment, looks up the
	 * partition count of the raw-data topic via zookeeper, preloads the set
	 * of already-published assets from the SDM database, then starts one
	 * consumer thread per partition.
	 *
	 * @param args unused; configuration comes from environment variables
	 */
	public static void main(String[] args) {
		// Params for KMX
		final String kmxZkUrl = EnvConf.getEvnValue(K2ComposeConstants.DATASERVICE_KAFKA_ZK, "");
		final String kmxKafkaUrl = EnvConf.getEvnValue(K2ComposeConstants.DATASERVICE_KAFKA, "");
		final String metadataUrl = EnvConf.getEvnValue(K2ComposeConstants.SDM_REST_API_URL, "");
		final String sdmDBUrl = EnvConf.getEvnValue(K2ComposeConstants.DATASERVICE_MYSQL_URL, "");

		// Params for shanngu
		final String sg_raw_kafka_zk_url = EnvConf.getEvnValue(K2ComposeConstants.SG_RAW_KAFKA_ZK_URL, kmxZkUrl);
		final String sg_raw_kafka_url = EnvConf.getEvnValue(K2ComposeConstants.SG_KAFKA_URL, kmxKafkaUrl);
		final String sgRawDataTopic = EnvConf.getEvnValue(K2ComposeConstants.SG_RAWDATA_TOPIC, "");
		final String sgExDataTopic = EnvConf.getEvnValue(K2ComposeConstants.SG_EXDATA_TOPIC, Constants.KMX_DATA_EX_TOPIC);

		logger.info("KMX Platform zookeeper url :" + kmxZkUrl);
		logger.info("KMX Platform kafka url :" + kmxKafkaUrl);
		logger.info("KMX platform metadata api url :" + metadataUrl);
		logger.info("KMX platform sdm db :" + sdmDBUrl);

		logger.info("Shanngu raw data kafka url :" + sg_raw_kafka_url);
		logger.info("Shanngu raw data zookeeper url :" + sg_raw_kafka_zk_url);
		logger.info("Shanngu raw data kafka topic name :" + sgRawDataTopic);
		logger.info("Shanngu exception data kafka topic name :" + sgExDataTopic);

		// Fail fast on missing configuration. The original checked kmxZkUrl
		// twice and never validated the raw-data topic name, which defaults
		// to "" and would fail much later in the topic-metadata lookup.
		if (kmxZkUrl.trim().isEmpty() || sg_raw_kafka_url.trim().isEmpty() || metadataUrl.trim().isEmpty()
				|| sgRawDataTopic.trim().isEmpty() || sg_raw_kafka_zk_url.isEmpty() || sdmDBUrl.trim().isEmpty()
				|| kmxKafkaUrl.isEmpty()) {
			logger.error("env config containing the error entry,please check...");
			System.exit(-1);
		}

		// Discover the partition count so we can start one consumer per
		// partition; close the ZkClient even if the lookup throws.
		ZkClient client = new ZkClient(sg_raw_kafka_zk_url, 4000, 2000, ZKStringSerializer$.MODULE$);
		final int sgRawKafkaPartationNum;
		try {
			TopicMetadata metadata = AdminUtils.fetchTopicMetadataFromZk(sgRawDataTopic, client);
			sgRawKafkaPartationNum = metadata.partitionsMetadata().size();
		} finally {
			client.close();
		}

		logger.info("Shanngu raw kafka topic has " + sgRawKafkaPartationNum + " partitions");

		final String assetUrl = metadataUrl + "/assets";
		logger.info("KMX kafka broker list is:" + kmxKafkaUrl);
		// FIXME(security): DB credentials are hard-coded; move them to
		// environment/config alongside the other settings.
		final Connection conn = MysqlService.getConnection(sdmDBUrl, "root", "passw0rd");
		final Map<String, String> publishedAssetsId = AssetUtils.getPublishedAssetsId(conn);
		try {
			logger.info("Close sdm db connection.");
			if (conn != null) {
				conn.close();
			}
		} catch (SQLException e) {
			logger.error("close sdm db connection got error.", e);
		}
		logger.info("Find " + publishedAssetsId.size() + " published assets");
		for (int i = 0; i < sgRawKafkaPartationNum; i++) {
			Thread thread = new Thread(new Runnable() {
				@Override
				public void run() {
					try {
						new ShaanGuRealTimeDataConsumer(sg_raw_kafka_zk_url, assetUrl, kmxKafkaUrl, sg_raw_kafka_url,
								sgExDataTopic).consume(sgRawDataTopic, publishedAssetsId);
					} catch (ExecutionException e) {
						// Log through SLF4J instead of printStackTrace().
						logger.error("consumer thread failed", e);
					} catch (InterruptedException e) {
						// Restore the interrupt flag instead of swallowing it.
						Thread.currentThread().interrupt();
						logger.error("consumer thread interrupted", e);
					}
				}
			});
			thread.start();
		}

	}
}
