package personal.wang.producer;

import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import personal.wang.job.TestPoint;
import personal.wang.job.TestPointV2;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
import java.util.Properties;

/**
 * Kafka producer helpers: a transactional producer and an idempotent producer,
 * sharing configuration loading via {@code -DkafkaConfig=<properties-file>}.
 *
 * @author wangyifei
 * @since 2022/6/25 18:19
 */
public class ProducerHandler {
    private static final Logger logger = LoggerFactory.getLogger(ProducerHandler.class);

    /** Shared producer instance; created by {@link #initProducer(String, String)}. */
    protected KafkaProducer<String, String> producer;
    protected String kafkaBootstrap;
    protected TestPointV2 testPoint;
    protected String txID;

    public void setTestPoint(TestPointV2 testPoint) {
        this.testPoint = testPoint;
    }

    /**
     * Builds the underlying {@link KafkaProducer}.
     * <p>
     * Configuration is loaded from the properties file named by the {@code -DkafkaConfig=...}
     * system property when present; otherwise a built-in default configuration is used
     * (idempotent, acks=all, lz4 compression, retries=1, max.in.flight=1).
     *
     * @param kafkaBootstrap value for {@code bootstrap.servers} (always applied, even when a
     *                       config file is supplied)
     * @param txID           transactional id; when {@code null}, no {@code transactional.id}
     *                       is set and the producer is non-transactional
     */
    public void initProducer(String kafkaBootstrap, String txID) {
        // Producer configuration supplied via the -DkafkaConfig system property, if any.
        String producerConfigPath = System.getProperty("kafkaConfig");
        Properties prop = new Properties();
        if (StringUtils.isNotEmpty(producerConfigPath)) {
            // try-with-resources so the config stream is always closed (was leaked before).
            try (FileInputStream in = new FileInputStream(producerConfigPath)) {
                prop.load(in);
            } catch (IOException e) {
                logger.error("failed to load producer config from {}", producerConfigPath, e);
            }
        } else {
            prop.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
            prop.put(ProducerConfig.LINGER_MS_CONFIG, 10);
            prop.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "lz4");
            prop.put(ProducerConfig.ACKS_CONFIG, "all");
            prop.put(ProducerConfig.RETRIES_CONFIG, 1);
            prop.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
            prop.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
            prop.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 30000);
            prop.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, 120000);
            prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getCanonicalName());
            prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getCanonicalName());
            prop.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 1);
            // NOTE(review): transaction.timeout.ms is in milliseconds — 3600 ms (3.6 s) looks
            // suspiciously low; confirm whether 3_600_000 (one hour) was intended.
            prop.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, 3600);
        }
        prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrap);
        if (!Objects.isNull(txID)) {
            prop.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, txID);
        }
        this.producer = new KafkaProducer<>(prop);
    }

    /**
     * Producer that writes each batch inside a Kafka transaction: all records of one
     * {@link #send(List, StringBuilder, String)} call commit or abort together.
     */
    public static class TransactionProducer extends ProducerHandler implements DataMockProducer {

        @Override
        public void init(String kafkaBootstrap, String txID) {
            this.kafkaBootstrap = kafkaBootstrap;
            this.txID = txID;
            initProducer(this.kafkaBootstrap, this.txID);
            producer.initTransactions();
        }

        @Override
        public void close() {
            // BUG FIX: close() used to be empty while send() closed the producer in a
            // finally block, killing the producer after the first batch. The producer's
            // lifetime is now owned here.
            if (!Objects.isNull(producer)) {
                producer.close();
            }
        }

        /**
         * Sends all records as a single transaction, appends each record plus a newline to
         * {@code sb}, and reports the batch outcome to {@code testPoint}.
         */
        @Override
        public void send(List<String> rds, StringBuilder sb, String topic) {
            if (Objects.isNull(rds) || rds.isEmpty()) {
                return;
            }
            try {
                producer.beginTransaction();
                for (String rd : rds) {
                    ProducerRecord<String, String> record = new ProducerRecord<>(topic, rd);
                    producer.send(record, new Callback() {
                        @Override
                        public void onCompletion(RecordMetadata metadata, Exception exception) {
                            if (!Objects.isNull(exception)) {
                                logger.error("send failed inside transaction", exception);
                            }
                        }
                    });
                    sb.append(rd).append('\n');
                }
                producer.commitTransaction();
                testPoint.record(true, rds.size());
            } catch (Exception e) {
                logger.error("transaction failed, aborting", e);
                testPoint.record(false, rds.size());
                try {
                    producer.abortTransaction();
                } catch (Exception abortEx) {
                    // abortTransaction can itself throw (e.g. producer fenced); don't let
                    // that mask the original failure already logged above.
                    logger.error("abortTransaction failed", abortEx);
                }
            }
        }
    }

    /**
     * Non-transactional producer relying on idempotence; each record is sent and its
     * individual outcome is reported to {@code testPoint} from the completion callback.
     */
    public static class IdempotenceProducer extends ProducerHandler implements DataMockProducer {

        @Override
        public void init(String kafkaBootstrap, String txID) {
            initProducer(kafkaBootstrap, txID);
        }

        @Override
        public void close() {
            this.producer.close();
        }

        /**
         * Sends each record exactly once and appends it (no newline) to {@code sb}.
         */
        @Override
        public void send(List<String> rds, StringBuilder sb, String topic) {
            if (Objects.isNull(rds) || rds.isEmpty()) {
                return;
            }
            for (String rd : rds) {
                sb.append(rd);
                ProducerRecord<String, String> record = new ProducerRecord<>(topic, rd);
                try {
                    // BUG FIX: each record used to be sent TWICE — once with this callback
                    // and once more with a MyCallback instance — doubling the data on the
                    // topic and double-counting testPoint. Records are now sent exactly once.
                    producer.send(record, new Callback() {
                        @Override
                        public void onCompletion(RecordMetadata metadata, Exception e) {
                            if (!Objects.isNull(e)) {
                                logger.error("send failed", e);
                                testPoint.record(false, 1);
                            } else {
                                testPoint.record(true, 1);
                            }
                        }
                    });
                } catch (Exception e) {
                    // was e.printStackTrace(): route through the class logger instead.
                    logger.error("producer.send threw", e);
                }
            }
        }
    }
}
