package tk.xboot.kfk;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.io.IOException;
import java.io.InputStream;
import java.util.*;


/**
 *
 * Demo consumer of kafka client.
 *
 *   Properties props = new Properties();
 *   props.put("bootstrap.servers", "localhost:9092");
 *   props.put("group.id", "test");
 *   props.put("enable.auto.commit", "true");
 *   props.put("auto.commit.interval.ms", "1000");
 *   props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
 *   props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
 *
 *   For more details, please visit :
 *   https://kafka.apache.org/10/javadoc/?org/apache/kafka/clients/consumer/KafkaConsumer.html
 *
 * @author Asin
 * @version 1.0
 */
public class DataConsumer {

    /** Shared JSON mapper — thread-safe and expensive to build, so created once. */
    private static final ObjectMapper MAPPER = new ObjectMapper();

    public static void main(String[] args) throws IOException {

        Properties props = new Properties();

        // Load consumer configuration from the classpath; try-with-resources
        // closes the stream (previously leaked), and a missing file fails fast
        // instead of surfacing as an NPE inside props.load().
        try (InputStream in = DataConsumer.class.getClassLoader()
                .getResourceAsStream("kfk-consumer.conf")) {
            if (in == null) {
                throw new IOException("kfk-consumer.conf not found on classpath");
            }
            props.load(in);
        }

        String topic = Objects.requireNonNull(props.getProperty("topic"),
                "'topic' property missing from kfk-consumer.conf");

        /* Per-exchange volume accumulators consumed by printDetails(). */
        Map<String, Double> map = new HashMap<>();
        map.put("Huobi_BTC_USDT", 0d);
        map.put("Binance_BTC_USDT", 0d);
        map.put("Bitfinex_BTC_USD", 0d);

        /* Create the consumer; subscribe() accepts several topics at once.
         * try-with-resources guarantees the consumer is closed if the loop
         * ever terminates abnormally. */
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Arrays.asList(topic));

            /* Read forever; each poll waits up to 100 ms for records. */
            while (true) {
                print(consumer);
                //printDetails(consumer, map);
            }
        }
    }

    /**
     * Polls once (up to 100 ms) and prints every record value, then commits
     * the consumed offsets asynchronously.
     *
     * @param consumer subscribed Kafka consumer to poll
     */
    private static void print(KafkaConsumer<String, String> consumer) {
        // poll(100): wait up to 100 ms, matching the documented read timeout.
        // (The original polled with a 1 ms timeout, busy-spinning the loop.)
        //ConsumerRecords<String, String> records = consumer.poll(ofSeconds(1));
        ConsumerRecords<String, String> records = consumer.poll(100);
        for (ConsumerRecord<String, String> record : records) {
            System.out.printf("get message from kafka:[ %s ]\n", record.value());
        }
        consumer.commitAsync();
    }

    /**
     * Polls once, accumulates the "volume" field of each record into {@code map}
     * keyed by the record key, prints the map as JSON, and commits asynchronously.
     *
     * @param consumer subscribed Kafka consumer to poll
     * @param map      pre-seeded key -&gt; running-volume accumulators; keys not
     *                 already present are deliberately ignored
     * @throws IOException if a record value cannot be parsed as JSON
     */
    private static void printDetails(KafkaConsumer<String, String> consumer, Map<String, Double> map)
            throws IOException {

        ConsumerRecords<String, String> records = consumer.poll(100);
        for (ConsumerRecord<String, String> record : records) {
            // NOTE(review): assumes each value is a JSON array of objects whose
            // first element carries a "volume" field — confirm against producer.
            List<LinkedHashMap<String, Object>> lst = MAPPER.readValue(record.value(), ArrayList.class);
            Double d = Double.valueOf(lst.get(0).get("volume").toString());

            // Only pre-registered keys are accumulated; unknown keys are skipped.
            if (map.containsKey(record.key())) {
                map.put(record.key(), map.get(record.key()) + d);
            }
        }
        String json = MAPPER.writeValueAsString(map);
        System.out.printf("topic info: %s\n", json);
        consumer.commitAsync();
    }
}
