package com.youshang.schema;

import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.util.Properties;
import java.util.Random;

import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
import io.confluent.kafka.serializers.KafkaAvroSerializer;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

/**
 * Produces Kafka messages whose values are Avro {@code GenericRecord}s serialized
 * with a schema (Confluent Schema Registry).
 *
 * @author wangshuai
 * @since 2020-01-03
 **/
public class KafkaCombineSchemaProducer {

    /**
     * Inline Avro schema for the {@code User} record. Kept for reference/backward
     * compatibility; at runtime the schema is loaded from {@code /avro/userModel.avsc}.
     */
    public static final String USER_SCHEMA = "{\"type\": \"record\", \"name\": \"User\", " +
            "\"fields\": [{\"name\": \"id\", \"type\": \"int\"}, " +
            "{\"name\": \"name\",  \"type\": \"string\"}, {\"name\": \"age\", \"type\": \"int\"}]}";

    /** Classpath location of the Avro schema actually used at runtime. */
    private static final String SCHEMA_RESOURCE = "/avro/userModel.avsc";

    /** Total number of records produced by {@link #main}. */
    private static final int RECORD_COUNT = 10_000_000;

    /**
     * Sends {@value #RECORD_COUNT} Avro-encoded {@code User} records to the
     * {@code test-topic} topic, registering the writer schema with the
     * Schema Registry via Confluent's {@link KafkaAvroSerializer}.
     *
     * @param args unused
     * @throws InterruptedException kept in the signature for backward compatibility
     *         (the original optionally slept between sends)
     */
    public static void main(String[] args) throws InterruptedException {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.3.123:9092,192.168.3.124:9092,192.168.3.125:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class);
        // Use Confluent's KafkaAvroSerializer for record values as well.
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class);
        // Schema Registry endpoints used to register/fetch the Avro schema.
        props.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG,
                "http://192.168.3.123:8081,http://192.168.3.124:8081,http://192.168.3.125:8081");

        // Fail fast here instead of continuing with a null schema (the original
        // swallowed the IOException and then hit an NPE at the first record).
        Schema schema = loadSchema();
        Random rand = new Random();

        // try-with-resources guarantees producer.close() — which flushes buffered
        // records — even if send() throws.
        try (Producer<String, GenericRecord> producer = new KafkaProducer<>(props)) {
            for (int id = 1; id <= RECORD_COUNT; id++) {
                GenericRecord user = new GenericData.Record(schema);
                user.put("id", id);
                user.put("name", "name" + id);
                user.put("age", rand.nextInt(40) + 1); // ages in [1, 40]
                producer.send(new ProducerRecord<>("test-topic", user));
            }
        }
    }

    /**
     * Loads and parses the Avro schema from {@link #SCHEMA_RESOURCE} on the classpath.
     *
     * @return the parsed schema, never {@code null}
     * @throws IllegalStateException if the resource is missing from the classpath
     * @throws UncheckedIOException  if the resource cannot be read or parsed
     */
    private static Schema loadSchema() {
        // try-with-resources closes the stream (the original leaked it).
        try (InputStream in = KafkaCombineSchemaProducer.class.getResourceAsStream(SCHEMA_RESOURCE)) {
            if (in == null) {
                throw new IllegalStateException("Schema resource not found on classpath: " + SCHEMA_RESOURCE);
            }
            return new Schema.Parser().parse(in);
        } catch (IOException e) {
            throw new UncheckedIOException("Failed to read Avro schema " + SCHEMA_RESOURCE, e);
        }
    }
}
