package com.kafka;

import com.google.gson.Gson;
import com.util.CsvUtil;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class SendDataToKafkaProducer {
    // Shared, lazily-created producer: all instances of this wrapper reuse one
    // KafkaProducer (KafkaProducer is thread-safe per the Kafka client docs).
    private static KafkaProducer<String, String> producer;
    // Guards the lazy initialization of {@code producer} so that concurrent
    // construction cannot create (and leak) more than one KafkaProducer.
    private static final Object PRODUCER_LOCK = new Object();
    final static Logger logger = LoggerFactory.getLogger(SendDataToKafkaProducer.class);
    // Kafka broker bootstrap address list, read from system properties via CsvUtil.
    private static String kafka_server = CsvUtil.getSysProperty("kafka.servers");
    // Gson is thread-safe and immutable once built; share one instance instead of
    // allocating a new one per message.
    private static final Gson GSON = new Gson();

    /**
     * Creates the wrapper, lazily initializing the shared KafkaProducer on first use.
     * Synchronized so that concurrent callers observe exactly one producer instance.
     */
    public SendDataToKafkaProducer() {
        synchronized (PRODUCER_LOCK) {
            if (producer == null) {
                Properties props = new Properties();
                props.put("bootstrap.servers", kafka_server);
                props.put("acks", "all");     // wait for the full in-sync-replica acknowledgement
                props.put("retries", 0);      // 0 disables automatic retries on transient send failures
                props.put("batch.size", 16384);
                props.put("linger.ms", 1);
                props.put("buffer.memory", 33554432);
                props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
                props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
                producer = new KafkaProducer<String, String>(props);
            }
        }
    }

    /**
     * Sends one keyed record to Kafka asynchronously (fire-and-forget).
     *
     * @param time  timestamp string supplied by the caller (currently unused here;
     *              kept for interface compatibility)
     * @param key   record key, used by Kafka for partition assignment
     * @param data  record value
     * @param topic destination topic
     */
    public void produceOne(String time, String key, String data, String topic) {
        producer.send(new ProducerRecord<String, String>(topic, key, data));
    }

    /**
     * Sends one unkeyed record to Kafka asynchronously (fire-and-forget).
     *
     * @param data  record value
     * @param topic destination topic
     */
    public void produceOne(String data, String topic) {
        producer.send(new ProducerRecord<String, String>(topic, data));
    }

    /**
     * Sends a station-status message to Kafka, wrapping time and payload in a
     * small JSON object {@code {"time": ..., "data": ...}}.
     *
     * @param time  timestamp string placed in the JSON envelope
     * @param key   record key, used by Kafka for partition assignment
     * @param data  payload placed in the JSON envelope
     * @param topic destination topic
     */
    public void produceErrorOne(String time, String key, String data, String topic) {
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("time", time);
        map.put("data", data);
        String objectJson = GSON.toJson(map);
        producer.send(new ProducerRecord<String, String>(topic, key, objectJson));
    }

    /**
     * Test driver: repeatedly publishes {@code str} wrapped in a JSON envelope to
     * the hard-coded topic "newTopic" every 20 ms until the thread is interrupted.
     *
     * @param str payload to publish on every iteration
     */
    public void produce(String str) {
        while (true) {
            Map<String, Object> map = new HashMap<String, Object>();
            map.put("time", System.currentTimeMillis());
            map.put("data", str);
            String objectJson = GSON.toJson(map);
            producer.send(new ProducerRecord<String, String>("newTopic", "key", objectJson));
            try {
                TimeUnit.MILLISECONDS.sleep(20);
            } catch (InterruptedException e) {
                // Restore the interrupt status and stop the loop instead of
                // swallowing the interruption.
                Thread.currentThread().interrupt();
                logger.warn("produce loop interrupted; stopping", e);
                return;
            }
        }
    }

    public static void main(String[] args) {
        new SendDataToKafkaProducer().produce("发送内容");
    }
}
