package com.lx.kafka.producer;

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import com.alibaba.fastjson.JSONObject;
import com.lx.kafka.CallBack.MyCallBack;
import com.lx.kafka.Properties.ProducerProperties;
import com.lx.kafka.log.KafkaLog;
import com.spring.pojo.User;
import org.apache.kafka.clients.producer.*;
import org.slf4j.LoggerFactory;

import java.util.concurrent.ExecutionException;

/**
 * Description:
 * Copyright:   Copyright (c)2019
 * Company:     zefu
 *
 * @author: 张李鑫
 * @version: 1.0
 * Create at:   2022-02-19 13:06:11
 * <p>
 * Modification History:
 * Date         Author      Version     Description
 * ------------------------------------------------------------------
 * 2022-02-19     张李鑫                     1.0         1.0 Version
 */
public class CustomProducer {

    /** Topic every record in this demo is published to. */
    private static final String TOPIC = "topic02";

    public static void main(String[] args) throws ExecutionException, InterruptedException {
        KafkaLog.closeLog();
        ProducerProperties producerProperties = new ProducerProperties();
        // try-with-resources flushes and closes the producer even when send(...)
        // throws; the original explicit close() was skipped on exception, leaking
        // the producer's network threads and any buffered records.
        try (KafkaProducer<String, String> producer =
                new KafkaProducer<>(producerProperties.properties)) {
            send(new MyCallBack(), producer);
//            asySend(new MyCallBack(), producer);
        }
    }

    /**
     * Synchronous send: publishes ten JSON-serialized {@link User} records to
     * {@value #TOPIC}, blocking on each broker acknowledgement.
     *
     * @param callBack invoked by the Kafka client when each send completes
     * @param producer the producer to publish with; not closed by this method
     * @throws ExecutionException   if a send fails on the broker side
     * @throws InterruptedException if the calling thread is interrupted while waiting
     */
    public static void send(Callback callBack, KafkaProducer<String, String> producer)
            throws ExecutionException, InterruptedException {
        for (int i = 0; i < 10; i++) {
            // Build the payload object (fixed demo values).
            User user = new User();
            user.setName("lx");
            user.setAge(18);
            user.setId(12);

            ProducerRecord<String, String> record =
                    new ProducerRecord<>(TOPIC, JSONObject.toJSON(user).toString());
            // get() turns the asynchronous send into a blocking (synchronous) one.
            producer.send(record, callBack).get();
        }
    }

    /**
     * Asynchronous send: fires ten records at {@value #TOPIC} without waiting
     * for acknowledgement; the callback reports each result.
     *
     * @param callBack invoked by the Kafka client when each send completes
     * @param producer the producer to publish with; not closed by this method
     */
    public static void asySend(Callback callBack, KafkaProducer<String, String> producer) {
        for (int i = 0; i < 10; i++) {
            ProducerRecord<String, String> record =
                    new ProducerRecord<>(TOPIC, "value" + i);
            producer.send(record, callBack);
        }
    }

    /**
     * Asynchronous send without a callback: fires 100 records at
     * {@value #TOPIC}, then closes the producer (close() flushes any
     * still-buffered records before returning).
     *
     * @param producer the producer to publish with; closed before this method returns
     */
    public static void asySend(KafkaProducer<String, String> producer) {
        for (int i = 0; i < 100; i++) {
            ProducerRecord<String, String> record =
                    new ProducerRecord<>(TOPIC, "value" + i);
            producer.send(record);
        }
        producer.close();
    }

}
