package com.gxkj.demo.kafka.controllers;


import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Properties;

/**
 * Demo: produces a single Avro-binary-encoded record to a Kafka topic.
 *
 * References:
 * http://kafka.apache.org/documentation.html#producerapi
 * http://kafka.apache.org/0100/javadoc/index.html?org/apache/kafka/clients/producer/KafkaProducer.html
 */
public class ProducerDemo {

    /** Topic and partition the demo record is sent to. */
    private static final String TOPIC = "test";
    private static final int PARTITION = 1;

    /** Avro schema of the demo record: a pair of strings named "left" and "right". */
    private static final String SCHEMA_STR =
            "{\"type\":\"record\",\"name\":\"StringPair\",\"doc\":\"A pair of strings\",\"fields\":[{\"name\":\"left\",\"type\":\"string\",\"avro.java.string\":\"String\"},{\"name\":\"right\",\"type\":\"string\"}]}";

    public static void main(String[] args) {
        new ProducerDemo().produce();
    }

    /**
     * Builds a Kafka producer, serializes one {@code StringPair} record to Avro
     * binary, and sends it to partition {@link #PARTITION} of topic {@link #TOPIC}.
     *
     * @throws RuntimeException if Avro serialization of the record fails
     */
    public void produce() {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "192.168.41.11:9092");
        properties.put("acks", "all");
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
        // NOTE(review): "block.on.buffer.full" is deprecated (removed in newer
        // Kafka clients); kept for compatibility with the 0.10-era client this
        // demo targets. Prefer "max.block.ms" on current clients.
        properties.put("block.on.buffer.full", "false");

        Schema schema = new Schema.Parser().parse(SCHEMA_STR);
        GenericRecord datum = new GenericData.Record(schema);
        datum.put("left", "L");
        datum.put("right", "R");

        // try-with-resources: the producer owns network connections and an I/O
        // thread; closing it also flushes any buffered records. The original
        // code leaked the producer, so the message could be lost on JVM exit.
        try (KafkaProducer<String, byte[]> producer = new KafkaProducer<>(properties)) {
            ProducerRecord<String, byte[]> record = new ProducerRecord<>(
                    TOPIC, PARTITION, "key", datumToByteArray(schema, datum));
            producer.send(record);
        } catch (IOException e) {
            // Fail loudly with the cause attached. The original swallowed the
            // exception and then passed a null record to send(), producing a
            // NullPointerException that hid the real serialization error.
            throw new RuntimeException("Failed to serialize Avro datum", e);
        }
    }

    /**
     * Serializes an Avro {@link GenericRecord} to its raw binary encoding
     * (no schema header, no framing).
     *
     * @param schema the writer schema the datum conforms to
     * @param datum  the record to encode
     * @return the Avro binary encoding of {@code datum}
     * @throws IOException if encoding fails
     */
    public static byte[] datumToByteArray(Schema schema, GenericRecord datum) throws IOException {
        GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
        try (ByteArrayOutputStream os = new ByteArrayOutputStream()) {
            Encoder encoder = EncoderFactory.get().binaryEncoder(os, null);
            writer.write(datum, encoder);
            // binaryEncoder buffers internally; flush before reading the bytes.
            encoder.flush();
            return os.toByteArray();
        }
    }
}
