package com.demo.kafka;

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

/**
 * Kafka producer smoke test: sends 1000 JSON test records to {@link #TOPIC}
 * and reports per-record delivery status through this {@link Callback}.
 *
 * <p>Run via {@link #main(String[])}; requires network access to the test
 * brokers in {@link #BROKER_LIST}.
 */
class ProducerCallback implements Callback {
    /** Target topic for the test records. */
    private static final String TOPIC = "kafka2hdfs_test";
    /** Test-environment broker bootstrap list. */
    private static final String BROKER_LIST = "node1.test.gmq.chinawayltd.com:9092,node2.test.gmq.chinawayltd.com:9092,node3.test.gmq.chinawayltd.com:9092";

    /** Message payload echoed back in the delivery report for this record. */
    private final Object messageObject;

    public ProducerCallback(Object messageObject) {
        this.messageObject = messageObject;
    }

    /**
     * Builds the producer configuration for the test environment.
     *
     * @return properties with broker list, string serializers, acks and retry policy set
     */
    private static Properties initConfig() {
        Properties properties = new Properties();
        // Broker bootstrap list (test environment).
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKER_LIST);
        // Serialize both key and value as UTF-8 strings.
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // Wait for acknowledgement from all in-sync replicas (strongest durability).
        properties.put(ProducerConfig.ACKS_CONFIG, "all");
        // Batch size in bytes before a send is triggered.
        properties.put(ProducerConfig.BATCH_SIZE_CONFIG, 16 * 1024);
        // FIX: was 0, which silently dropped records on any transient broker
        // error and made acks=all pointless; allow a few retries.
        properties.put(ProducerConfig.RETRIES_CONFIG, 3);
        System.out.println("init success");

        return properties;
    }

    /**
     * Delivery report for one record.
     *
     * @param metadata  partition/offset of the acknowledged record (null on failure)
     * @param exception null on success, otherwise the cause of the send failure
     */
    @Override
    public void onCompletion(RecordMetadata metadata, Exception exception) {
        // A null exception means the record was acknowledged by the broker.
        if (exception == null) {
            System.out.println("send msg " + messageObject + " success");
        } else {
            // FIX: failures were previously swallowed silently; report them.
            System.err.println("send msg " + messageObject + " failed: " + exception);
        }
    }

    /**
     * Sends 1000 JSON records (fields: id, gps_status, gsm, imei, lat,
     * time_local_cc) keyed by their loop index.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        // try-with-resources guarantees the producer is flushed and closed
        // even if a send throws (the original leaked it on error).
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(initConfig())) {
            for (int i = 0; i < 1000; i++) {
                String msg = "{\"id\":"+i+",\"gps_status\":\"abcdefg\",\"gsm\":\"112321\",\"imei\":" + System.currentTimeMillis() + ",\"lat\":\"11.0\",\"time_local_cc\":\"123456\"}";
                ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC, String.valueOf(i), msg);
                // FIX: the callback class was defined but never wired into
                // send(); without it delivery results were never observed.
                producer.send(record, new ProducerCallback(msg));
                System.out.println("send a msg->" + msg);
            }
        }
    }
}