package kafka;

import kafka.admin.AdminUtils;
import kafka.utils.ZkUtils;
import net.sf.json.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.security.JaasUtils;

import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

/**
 * Kafka consumer demo: subscribes to a topic and prints each record's value.
 * USER: lintc 【lintiancong@zhuojianchina.com】
 * DATE: 2017-01-21 11:37
 */
public class KafkaCustomer {

    /** ZooKeeper connection string used for topic administration. */
    private static final String ZK_SERVERS =
            "182.254.218.51:2181,118.89.105.52:2181,115.159.113.120:2181";

    /** Single-broker bootstrap list used by the no-arg consumer factory. */
    private static final String SINGLE_BOOTSTRAP = "115.159.113.120:9092";

    /** Full three-broker bootstrap list used by the group-aware consumer factory. */
    private static final String CLUSTER_BOOTSTRAP =
            "182.254.218.51:9092,118.89.105.52:9092,115.159.113.120:9092";

    /** Default consumer group used by {@link #getConsumer()}. */
    private static final String DEFAULT_GROUP = "te11_1234";

//    public static String topic = HospitalTopic.ZYYY1;

    /**
     * Entry point: consumes and prints messages from a hard-coded topic.
     * Runs until the process is killed (see {@link #getMsg(String)}).
     */
    public static void main(String[] args) {
        String topic = "nuts_jxyy";
//        removeTopic(topic);
        getMsg(topic);
//        KafkaUtil.getTopicList(getConsumer());
    }

    /**
     * Deletes the given topic via the ZooKeeper admin API.
     *
     * @param topic name of the topic to delete
     */
    public static void removeTopic(String topic) {
        ZkUtils zkUtils = ZkUtils.apply(ZK_SERVERS, 30000, 30000, JaasUtils.isZkSecurityEnabled());
        try {
            AdminUtils.deleteTopic(zkUtils, topic);
        } finally {
            // Always release the ZooKeeper connection, even if deletion fails.
            zkUtils.close();
        }
    }

    /**
     * Subscribes to the given topic and prints every record's value to stdout.
     * Loops forever; only returns by throwing (e.g. WakeupException on shutdown).
     *
     * @param topic topic to consume from
     */
    public static void getMsg(String topic) {
        // try-with-resources: KafkaConsumer implements Closeable, so the
        // network resources are released even if the poll loop throws.
        try (KafkaConsumer<String, String> consumer = getConsumer()) {
            consumer.subscribe(Arrays.asList(topic));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(50);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value());
                }
            }
        }
    }

    /**
     * Creates a consumer against the single default broker using the
     * default consumer group.
     *
     * @return a new {@code KafkaConsumer}; the caller is responsible for closing it
     */
    public static KafkaConsumer<String, String> getConsumer() {
        return newConsumer(SINGLE_BOOTSTRAP, DEFAULT_GROUP);
    }

    /**
     * Creates a consumer against the full broker cluster for the given group.
     *
     * @param group consumer group id
     * @return a new {@code KafkaConsumer}; the caller is responsible for closing it
     */
    public static KafkaConsumer<String, String> getConsumer(String group) {
        return newConsumer(CLUSTER_BOOTSTRAP, group);
    }

    /**
     * Shared factory: builds the common consumer configuration.
     * Auto-commit is enabled (1 s interval); offsets start from the group's
     * committed position ("auto.offset.reset" deliberately left at its default).
     *
     * @param bootstrapServers comma-separated broker list
     * @param group            consumer group id
     * @return a new String/String consumer
     */
    private static KafkaConsumer<String, String> newConsumer(String bootstrapServers, String group) {
        Properties props = new Properties();
        props.put("bootstrap.servers", bootstrapServers);
        props.put("group.id", group);
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        return new KafkaConsumer<>(props);
    }

}
