package com.linkstec.wechat;

import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
/**
 * Kafka "new" Consumer API demo. The new consumer is based on Kafka's own group
 * coordination protocol (the old consumer relied on ZooKeeper) and offers:
 * <ol>
 * <li>A single API merging the old High Level and Low Level APIs, supporting both
 *     group coordination and lower-level access.</li>
 * <li>A pure-Java implementation with no runtime dependency on Scala or ZooKeeper.</li>
 * <li>Better security: the security extensions introduced in Kafka 0.9 are only
 *     supported by the new consumer.</li>
 * <li>Fault-tolerant consumer groups without the complex ZooKeeper-based logic of the
 *     old consumer, which made that feature hard to implement in other languages.</li>
 * </ol>
 * Although the new consumer reworks the API and uses a new coordination protocol, the
 * core concepts are unchanged, so users familiar with the old consumer should find it
 * easy to pick up. Pay particular attention to group management and the threading model.
 * @author linkage
 *
 */
public class KafkaConsumerTestNew implements Runnable {
	private static final Logger logger = LoggerFactory.getLogger(KafkaConsumerTestNew.class);

	// Compiled once instead of per loop iteration; strips tab/CR/LF from entries.
	private static final Pattern CONTROL_CHARS = Pattern.compile("\t|\r|\n");

	private final KafkaConsumer<String, String> consumer;
	private final List<String> topics;
	private final int id;

	/**
	 * Creates a consumer bound to the given consumer group and topic list.
	 *
	 * @param id      logical id of this consumer instance, used only in log output
	 * @param groupId Kafka consumer group id
	 * @param topics  topics to subscribe to
	 */
	public KafkaConsumerTestNew(int id, String groupId, List<String> topics) {
		this.id = id;
		this.topics = topics;
		Properties props = new Properties();
		props.put("bootstrap.servers", "192.168.2.232:9092,192.168.2.233:9092,192.168.2.234:9092");
		props.put("group.id", groupId);
		props.put("key.deserializer", StringDeserializer.class.getName());
		props.put("value.deserializer", StringDeserializer.class.getName());
		// Offsets are committed manually: commitAsync after each batch in run(),
		// plus a final commitSync on shutdown so no acknowledged batch is lost.
		props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
		// When this group has no committed offset yet, start from the earliest record.
		props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
		this.consumer = new KafkaConsumer<>(props);
	}

	/**
	 * Poll loop: consumes records until {@link #shutdown()} wakes the consumer,
	 * sending a WeChat notification for every item of each JSON-array record.
	 * A malformed record is logged and skipped rather than killing the thread.
	 */
	@Override
	public void run() {
		try {
			consumer.subscribe(topics);
			while (true) {
				// poll(long) blocks until records arrive. NOTE(review): deprecated in
				// kafka-clients >= 2.0 in favour of poll(Duration); kept for
				// compatibility with older client versions.
				ConsumerRecords<String, String> records = consumer.poll(Long.MAX_VALUE);
				for (ConsumerRecord<String, String> record : records) {
					try {
						processRecord(record);
					} catch (RuntimeException e) {
						// One bad payload (e.g. invalid JSON) must not stop consumption;
						// log with cause and continue with the next record.
						logger.error("Failed to process record at partition {} offset {}",
								record.partition(), record.offset(), e);
					}
				}
				consumer.commitAsync();
			}
		} catch (WakeupException e) {
			// Expected on shutdown(); fall through to cleanup.
		} finally {
			try {
				// Synchronously flush offsets that commitAsync may still have in flight.
				consumer.commitSync();
			} finally {
				consumer.close();
			}
		}
	}

	/**
	 * Parses one record value — a JSON array of event objects — and sends a WeChat
	 * message per item, built from the item's {@code appendInformation} field.
	 * Example payload:
	 * [{"appendInformation":"转账方式=银行转证券;转账金额=100000;转账时间=2018-05-16 14:38;
	 *   客户号=null;资金账户=****5678","customerId":"2717358","eventId":389,...}]
	 */
	private void processRecord(ConsumerRecord<String, String> record) {
		Map<String, Object> data = new HashMap<>();
		data.put("partition", record.partition());
		data.put("offset", record.offset());
		data.put("value", record.value());

		JSONArray items = JSONArray.parseArray(record.value());
		int itemCount = items.size();
		for (int i = 0; i < itemCount; i++) {
			JSONObject item = items.getJSONObject(i);
			String appendInformation = item.getString("appendInformation");
			logger.info("附加信息：{}", appendInformation);
			Map<String, String> appendMap = getAppendInfoValueAndKey(appendInformation);
			// Template parameters expected by the WeChat push endpoint.
			HashMap<String, String> message = new HashMap<String, String>();
			message.put("data2", LocalDateTime.now().toString());
			message.put("data3", appendMap.get("转账金额") + "元");
			WechatHttpClient.sendWechat(message);
		}

		logger.info("{}: {}", this.id, data);
	}

	/** Interrupts the poll loop; run() then commits outstanding offsets and closes. */
	public void shutdown() {
		consumer.wakeup();
	}

	public static void main(String[] args) {
		int numConsumers = 1;
		String groupId = "pushTest_2";
		List<String> topics = Arrays.asList("test");
		ExecutorService executor = Executors.newFixedThreadPool(numConsumers);
		final List<KafkaConsumerTestNew> consumers = new ArrayList<>();
		for (int i = 0; i < numConsumers; i++) {
			KafkaConsumerTestNew consumer = new KafkaConsumerTestNew(i, groupId, topics);
			consumers.add(consumer);
			executor.submit(consumer);
		}
		// Graceful shutdown: wake each consumer so its poll loop exits, then wait
		// (bounded) for the workers to finish committing and closing.
		Runtime.getRuntime().addShutdownHook(new Thread() {
			@Override
			public void run() {
				for (KafkaConsumerTestNew consumer : consumers) {
					consumer.shutdown();
				}
				executor.shutdown();
				try {
					executor.awaitTermination(5000, TimeUnit.MILLISECONDS);
				} catch (InterruptedException e) {
					// Restore the interrupt status instead of swallowing it.
					Thread.currentThread().interrupt();
				}
			}
		});
	}

	/**
	 * Splits a template string such as
	 * {@code 客户编号=79001465;客户姓名=刘**;销户日期=20161109} into a key/value map.
	 * Tab/CR/LF characters are stripped first; entries without '=' are skipped, and
	 * only the first '=' splits, so values containing '=' stay intact.
	 *
	 * @param originalStr semicolon-separated {@code key=value} pairs; may be null
	 *                    (the JSON field can be absent)
	 * @return map of parsed keys to values; empty when input is null or has no pairs
	 */
	protected Map<String, String> getAppendInfoValueAndKey(String originalStr) {
		Map<String, String> map = new HashMap<String, String>();
		if (originalStr == null) {
			return map;
		}
		for (String entry : originalStr.split(";")) {
			String cleaned = CONTROL_CHARS.matcher(entry).replaceAll("");
			String[] pair = cleaned.split("=", 2);
			if (pair.length == 2) {
				map.put(pair[0], pair[1]);
			}
		}
		return map;
	}

}