package com.pk.flink.apps;

import com.pk.flink.partitioner.PKPartitioner;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.errors.AuthorizationException;
import org.apache.kafka.common.errors.OutOfOrderSequenceException;
import org.apache.kafka.common.errors.ProducerFencedException;
import org.apache.kafka.common.serialization.StringSerializer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.util.Properties;


/**
 * Exercises a transactional Kafka producer with a custom partitioner
 * ({@link PKPartitioner}): sends two keyed batches inside one transaction
 * and commits, aborting on retriable failures.
 */
public class KafkaTopicStrategyCustomApp {

    private static final String SERVERS = "master:9092,slave1:9092,slave2:9092";
    private static final String TOPIC = "pk-2-2";

    // Created in setUp(), closed in tearDown(); shared by the test methods.
    private KafkaProducer<String, String> producer = null;

    @Before
    public void setUp() {
        Properties props = new Properties();
        // Setting a transactional.id implicitly enables idempotence, which in
        // turn REQUIRES acks=all — any other value is rejected at construction.
        props.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "my-transactional-id");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, PKPartitioner.class.getName());
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, SERVERS);

        // Batching / throughput tuning.
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);       // 16 KB per-partition batch
        props.put(ProducerConfig.LINGER_MS_CONFIG, 3000);         // wait up to 3 s to fill a batch
        props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy");
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432); // 32 MB total send buffer
        props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 60000);     // block at most 60 s when buffer is full
        // FIX: was the int 1 — acks takes a String, and a transactional
        // (idempotent) producer only accepts "all"; anything else throws
        // ConfigException before any test can run.
        props.put(ProducerConfig.ACKS_CONFIG, "all");

        producer = new KafkaProducer<>(props);
        producer.initTransactions();
    }

    /**
     * Builds a null-safe logging callback. On a failed send Kafka passes a
     * null {@code metadata}, so the error branch must not dereference it.
     */
    private static Callback loggingCallback() {
        return (metadata, exception) -> {
            if (exception == null) {
                System.out.println("Topic: " + metadata.topic()
                        + " Partition:" + metadata.partition() + " Send OK");
            } else {
                // FIX: original read metadata.topic() here and NPE'd on the
                // very path meant to report the error.
                System.out.println("Send failed: " + exception.getMessage());
            }
        };
    }

    /**
     * Sends two batches of 10 records (keys "pk-01" and "ruoze-01") inside a
     * single transaction; commits on success, aborts on retriable errors.
     */
    @Test
    public void test01() {
        try {
            producer.beginTransaction();

            for (int i = 0; i < 10; i++) {
                producer.send(new ProducerRecord<>(TOPIC, "pk-01", Integer.toString(i)),
                        loggingCallback());
            }

            System.out.println("=================================");

            for (int i = 0; i < 10; i++) {
                producer.send(new ProducerRecord<>(TOPIC, "ruoze-01", Integer.toString(i)),
                        loggingCallback());
            }

            producer.commitTransaction();
        } catch (ProducerFencedException | OutOfOrderSequenceException | AuthorizationException e) {
            // Fatal for this producer instance — no recovery possible; close it.
            producer.close();
        } catch (KafkaException e) {
            // Any other Kafka error: the transaction can be aborted and retried.
            producer.abortTransaction();
        }
    }

    @After
    public void tearDown() {
        // close() is idempotent, so this is safe even when test01 already
        // closed the producer on a fatal exception.
        producer.close();
    }
}
