package com.dili.dd.flume.plugin.kafka;

import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.flume.*;
import org.apache.flume.conf.Configurable;
import org.apache.flume.sink.AbstractSink;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

public class KafkaSink extends AbstractSink implements Configurable {

    private static final Log logger = LogFactory.getLog(KafkaSink.class);

    /**
     * Configuration parameters, populated in {@link #configure(Context)}.
     */
    private String topic;
    private String host;   // NOTE(review): read from config but never used afterwards — confirm before removing
    private int port;      // NOTE(review): read from config but never used afterwards — confirm before removing

    /**
     * Default values for the configuration parameters.
     */
    private static final String DEFAULT_TOPIC = "flume-log";
    private static final String DEFAULT_HOST = "kafkanode1";
    private static final int DEFAULT_PORT = 9092;
    private static final String DEFAULT_BROKER = "kafkanode1:9092";

    /** Kafka producer; created in {@link #configure(Context)}, closed in {@link #stop()}. */
    private Producer<String, String> producer;

    /**
     * Takes one event from the channel per invocation and publishes its body
     * to the configured Kafka topic inside a channel transaction.
     *
     * @return {@code Status.READY} when an event was delivered,
     *         {@code Status.BACKOFF} when the channel was empty or delivery failed
     * @throws EventDeliveryException declared by the Sink contract; not thrown directly here
     */
    @Override
    public Status process() throws EventDeliveryException {
        Channel channel = getChannel();
        Transaction tx = channel.getTransaction();
        try {
            tx.begin();
            Event event = channel.take();
            if (event == null) {
                // Channel is empty: roll back and ask the runner to back off.
                tx.rollback();
                return Status.BACKOFF;
            }
            // Decode with an explicit charset; a bare new String(byte[]) uses the
            // platform default and can corrupt multi-byte payloads.
            String body = new String(event.getBody(), StandardCharsets.UTF_8);
            producer.send(new KeyedMessage<String, String>(topic, body));
            // commons-logging has no {} placeholders, so build the message by concatenation.
            logger.info("Message: " + body);
            tx.commit();
            return Status.READY;
        } catch (Exception e) {
            // Pass the throwable as the second argument so the stack trace is logged.
            logger.error("KafkaSinkException", e);
            tx.rollback();
            return Status.BACKOFF;
        } finally {
            tx.close();
        }
    }

    /**
     * Reads the sink configuration from the Flume context and creates the
     * Kafka producer.
     *
     * @param context Flume context carrying the {@code mq.*} properties
     */
    @Override
    public void configure(Context context) {
        logger.info("初始化kafakSink参数配置");
        topic = context.getString("mq.topic", DEFAULT_TOPIC);
        host = context.getString("mq.host", DEFAULT_HOST);
        port = context.getInteger("mq.port", DEFAULT_PORT);
        String brokers = context.getString("mq.brokers", DEFAULT_BROKER);

        Properties props = new Properties();
        props.setProperty("metadata.broker.list", brokers);
        props.setProperty("serializer.class", "kafka.serializer.StringEncoder");
        props.setProperty("request.required.acks", "1");

        producer = new Producer<String, String>(new ProducerConfig(props));
        logger.info("初始化kafakSink参数配置结束");
    }

    /**
     * Releases the Kafka producer when the sink is stopped. The original code
     * never closed the producer, leaking its network resources on shutdown.
     */
    @Override
    public synchronized void stop() {
        if (producer != null) {
            producer.close();
            producer = null;
        }
        super.stop();
    }

    /** Manual smoke test entry point: consumes and prints messages from the test topic. */
    public static void main(String[] args) {
        testReceive();
    }

    /** Builds the consumer configuration used by {@link #testReceive()}. */
    private static ConsumerConfig createConsumerConfig() {
        Properties props = new Properties();
        props.put("zookeeper.connect", KafkaProperties.zkConnect);
        props.put("group.id", KafkaProperties.groupId);
        props.put("zookeeper.session.timeout.ms", "40000");
        props.put("zookeeper.sync.time.ms", "200");
        props.put("auto.commit.interval.ms", "1000");
        return new ConsumerConfig(props);
    }

    /** Consumes messages from the test topic in a loop, printing each one. */
    private static void testReceive() {
        ConsumerConnector consumer =
                kafka.consumer.Consumer.createJavaConsumerConnector(createConsumerConfig());

        String topic = "flume-log";
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        // One consumer thread for the topic; Integer.valueOf over deprecated new Integer(1).
        topicCountMap.put(topic, Integer.valueOf(1));
        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
                consumer.createMessageStreams(topicCountMap);
        KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
        ConsumerIterator<byte[], byte[]> it = stream.iterator();
        while (it.hasNext()) {
            System.out.println("receive：" + new String(it.next().message(), StandardCharsets.UTF_8));
            try {
                Thread.sleep(3000);
            } catch (InterruptedException e) {
                // Restore the interrupt flag and stop instead of swallowing it.
                Thread.currentThread().interrupt();
                return;
            }
        }
    }

    /** Manual smoke test: sends one message to the test topic (not wired to main). */
    private static void testSend() {
        String topic = "flume-log";
        String msg = "flume test msg";

        Properties props = new Properties();
        props.setProperty("metadata.broker.list", "kafkanode1:9092");
        props.setProperty("serializer.class", "kafka.serializer.StringEncoder");
        ProducerConfig config = new ProducerConfig(props);

        Producer<String, String> producer = new Producer<String, String>(config);
        try {
            producer.send(new KeyedMessage<String, String>(topic, msg));
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Close in finally so the producer is released even when send fails.
            producer.close();
        }
    }

    /**
     * Shared test constants for the manual Kafka smoke tests.
     * NOTE(review): constant-interface antipattern; kept for compatibility with existing references.
     */
    public interface KafkaProperties {
        final static String zkConnect = "node2:2181,node3:2181,node4:2181";
        final static String groupId = "group1";
        final static String topic = "flume-log";
        final static String kafkaServerURL = "kafkanode1";
        final static int kafkaServerPort = 9092;
        final static int kafkaProducerBufferSize = 64 * 1024;
        final static int connectionTimeOut = 20000;
        final static int reconnectInterval = 10000;
        final static String topic2 = "topic2";
        final static String topic3 = "topic3";
        final static String clientId = "SimpleConsumerDemoClient";
    }
}