package com.yunfei.commonbase.kafkaUtil;

import com.alibaba.fastjson.JSONObject;
import com.yunfei.commonbase.result.Result;
import io.swagger.annotations.ApiOperation;
import org.apache.kafka.clients.admin.*;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.TopicPartition;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicReference;

@RestController
@RequestMapping(value = "/kafka")
public class KafkaController {

    /** Timeout for the createTopics admin call, in milliseconds. */
    private static final int CREATE_TOPIC_TIMEOUT_MS = 60000;
    /** Total time the consume endpoints keep polling before returning, in milliseconds. */
    private static final long CONSUME_DEADLINE_MS = 40 * 1000L;
    /** Timeout of a single consumer poll, in milliseconds. */
    private static final long POLL_TIMEOUT_MS = 5000L;

    @ApiOperation(value = "创建topic",notes="<pre>\n" +
            "参数说明：\n" +
            "{\n" +
            "\tString  topic              //要创建的topic  不能为空\n" +
            "\tString  protocol           //kafka的认证权限方式  没有权限为空  支持：SASL_PLAINTEXT\n" +
            "\tString  mechanism          //kafka的权限机制     没有权限为空  支持：PLAIN和SCRAM-SHA-512两种权限机制\n" +
            "\tString  servers            //kafka的连接地址 多个逗号隔开  不能为空\n" +
            "\tString  config             //kafka的来连接参数   没有权限为空  PLAIN权限机制:密码不加密 SCRAM-SHA-512权限机制:MD5加密\n" +
            "\tString  partition          //分区数  不传默认为1\n" +
            "\tString  replicationFactor  //副本数  不传默认为1\n" +
            "}\n" +
            "</pre>")
    @RequestMapping(value = "/createTopic", method = RequestMethod.POST)
    public Result createTopic(@RequestBody KafkaParm parm) throws ExecutionException, InterruptedException {
        // Validate required fields up front, matching the other endpoints.
        if (isEmpty(parm.getTopic()) || isEmpty(parm.getServers()))
            throw new RuntimeException("topic或servers不能为空");
        // Partition count and replication factor default to 1 when not supplied.
        if (isEmpty(parm.getPartition()))
            parm.setPartition("1");
        if (isEmpty(parm.getReplicationFactor()))
            parm.setReplicationFactor("1");

        NewTopic newTopic = new NewTopic(parm.getTopic(),
                Integer.parseInt(parm.getPartition()),
                Short.parseShort(parm.getReplicationFactor()));

        // Admin clients only need connection/security settings; serializers are
        // producer-only configs and were spurious here.
        Properties props = baseProps(parm.getProtocol(), parm.getMechanism(), parm.getConfig(), parm.getServers());
        // try-with-resources: the original leaked the AdminClient on every call.
        try (AdminClient adminClient = KafkaAdminClient.create(props)) {
            adminClient.createTopics(Collections.singletonList(newTopic),
                    new CreateTopicsOptions().timeoutMs(CREATE_TOPIC_TIMEOUT_MS)).all().get();
        }
        return new Result("topic创建完成");
    }

    @ApiOperation(value = "获取topic列表",notes = "<pre>\n" +
            "参数说明：\n" +
            "{\n" +
            "\tString  topic              //要创建的topic  不能为空\n" +
            "\tString  protocol           //kafka的认证权限方式  没有权限为空  支持：SASL_PLAINTEXT\n" +
            "\tString  mechanism          //kafka的权限机制     没有权限为空  支持：PLAIN和SCRAM-SHA-512两种权限机制\n" +
            "\tString  servers            //kafka的连接地址 多个逗号隔开  不能为空\n" +
            "\tString  config             //kafka的来连接参数   没有权限为空  PLAIN权限机制:密码不加密 SCRAM-SHA-512权限机制:MD5加密\n" +
            "}\n" +
            "</pre>")
    @RequestMapping(value = "/listTopic", method = RequestMethod.POST)
    public Result listTopic(@RequestBody KafkaParm parm) throws ExecutionException, InterruptedException {
        if (isEmpty(parm.getServers()))
            throw new RuntimeException("servers不能为空");

        // Bug fix: the original passed parm.getProtocol() as "sasl.mechanism",
        // which broke SASL authentication on this endpoint. baseProps uses the
        // mechanism field, consistent with every other endpoint.
        Properties props = baseProps(parm.getProtocol(), parm.getMechanism(), parm.getConfig(), parm.getServers());
        try (AdminClient adminClient = KafkaAdminClient.create(props)) {
            Collection<TopicListing> topicListings = adminClient.listTopics().listings().get();
            List<String> topicList = new ArrayList<>(topicListings.size());
            for (TopicListing topicListing : topicListings) {
                topicList.add(topicListing.name());
            }
            return new Result(topicList);
        }
    }

    @ApiOperation(value = "发送kafka消息",notes = "<pre>\n" +
            "参数说明：\n" +
            "{\n" +
            "\tString  topic;       //主题topic不能为空\n" +
            "\tString  protocol;    //权限方法 没有为空支持：SASL_PLAINTEXT\n" +
            "\tString  mechanism;   //权限机制 没有为空 支持：PLAIN或 SCRAM-SHA-512\n" +
            "\tString  servers;     //kafka的连接地址多个逗号隔开 不能为空\n" +
            "\tString  config;      //kafka的连接参数  没有权限为空 PLAIN机制:密码不加密SCRAM-SHA-512机制:MD5加密\n" +
            "\tObject  msg;         //消息内容 支持：String  Map  List\n" +
            "}\n" +
            "</pre>")
    @RequestMapping(value = "/sendMsg", method = RequestMethod.POST)
    public Result sendMsg(@RequestBody SendStrParm parm) {
        if (isEmpty(parm.getServers()) || isEmpty(parm.getTopic())) {
            throw new RuntimeException("topic或servers不能为空");
        }

        // Strings are sent as-is; any other payload (Map/List/...) is serialized to JSON.
        Object obj = parm.getMsg();
        String msg = (obj instanceof String) ? (String) obj : JSONObject.toJSONString(obj);

        Properties props = baseProps(parm.getProtocol(), parm.getMechanism(), parm.getConfig(), parm.getServers());
        props.put("acks", "1");            // -1/all: all replicas ack; 1: leader ack; 0: no ack
        props.put("retries", 2);
        props.put("batch.size", 262144);
        props.put("linger.ms", 50);
        props.put("buffer.memory", 67108864);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // Captured by the async send callback; close() flushes all in-flight
        // records, so the callback is guaranteed to have run before we return.
        AtomicReference<String> sendOutcome = new AtomicReference<>("发送成功");
        try (Producer<String, String> producer = new KafkaProducer<>(props)) {
            producer.send(new ProducerRecord<>(parm.getTopic(), msg), (recordMetadata, e) -> {
                if (e == null) {
                    sendOutcome.set("发送成功：topic:" + recordMetadata.topic()
                            + "--partition:" + recordMetadata.partition()
                            + "--offset:" + recordMetadata.offset());
                } else {
                    sendOutcome.set("发送失败：" + e.getMessage());
                    e.printStackTrace();
                }
            });
        }
        // The original computed this outcome but discarded it and always
        // reported success; surface the real result instead.
        return new Result(sendOutcome.get());
    }

    @ApiOperation(value = "接收kafka消息",notes = "<pre>\n" +
            "参数说明：\n" +
            "{\n" +
            "\tString  topic;        //主题topic     不能为空\n" +
            "\tString  protocol;     //权限方式      没有权限为空       支持：SASL_PLAINTEXT\n" +
            "\tString  mechanism;    //权限机制      没有权限 为空      支持：PLAIN和SCRAM-SHA-512两种权限机制\n" +
            "\tString  servers;      //kafka连接地址 不能为空 多个逗号隔开\n" +
            "\tString  config;       //kafka权限参数 没有权限为空   PLAIN权限机制:密码不加密SCRAM-SHA-512权限机制:MD5加密\n" +
            "\tString  group;        //消费者组      可以为空 但不建议为空\n" +
            "}\n" +
            "</pre>")
    @RequestMapping(value = "/acceptMsg", method = RequestMethod.POST)
    public Result acceptMsg(@RequestBody AcceptParm parm) {
        if (isEmpty(parm.getTopic()) || isEmpty(parm.getServers()))
            throw new RuntimeException("topic或servers不能为空");

        Properties props = consumerProps(parm);
        List<String> msgList = new ArrayList<>();
        // try-with-resources guarantees the consumer is closed even if poll throws.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList(parm.getTopic()));
            // Poll repeatedly until the deadline, collecting everything received.
            long deadline = System.currentTimeMillis() + CONSUME_DEADLINE_MS;
            while (System.currentTimeMillis() < deadline) {
                ConsumerRecords<String, String> consumerRecords = consumer.poll(POLL_TIMEOUT_MS);
                for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                    String line = formatRecord(consumerRecord);
                    msgList.add(line);
                    System.out.println("---读到[" + parm.getTopic() + "]的消息---：" + line);
                }
            }
        }
        return new Result(msgList);
    }

    @ApiOperation(value = "接收指定offset的kafka消息",notes = "<pre>\n" +
            "参数说明：\n" +
            "{\n" +
            "\tString  topic;        //主题topic     不能为空\n" +
            "\tString  protocol;     //权限方式      没有权限为空       支持：SASL_PLAINTEXT\n" +
            "\tString  mechanism;    //权限机制      没有权限 为空      支持：PLAIN和SCRAM-SHA-512两种权限机制\n" +
            "\tString  servers;      //kafka连接地址 不能为空 多个逗号隔开\n" +
            "\tString  config;       //kafka权限参数 没有权限为空  PLAIN权限机制:密码不加密SCRAM-SHA-512权限机制:MD5加密\n" +
            "\tString  group;        //消费者组      可以为空 但不建议为空\n" +
            "\tString  partition;    //分区          不可为空\n" +
            "\tString  offset;       //偏移量        不可为空\n" +
            "}\n" +
            "</pre>")
    @RequestMapping(value = "/acceptMsgByOffset", method = RequestMethod.POST)
    public Result acceptMsgByOffset(@RequestBody AcceptParm parm) {
        if (isEmpty(parm.getTopic()) || isEmpty(parm.getServers())
                || isEmpty(parm.getOffset()) || isEmpty(parm.getPartition()))
            throw new RuntimeException("topic、servers、partition、offset不能为空");

        Properties props = consumerProps(parm);
        TopicPartition topicPartition =
                new TopicPartition(parm.getTopic(), Integer.parseInt(parm.getPartition()));
        List<String> msgList = new ArrayList<>();
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // Manual assignment (no subscribe) so we can seek to an explicit offset.
            // Offsets are longs; the original truncated them through Integer.parseInt.
            consumer.assign(Collections.singletonList(topicPartition));
            consumer.seek(topicPartition, Long.parseLong(parm.getOffset()));
            // The original looped forever when the partition had no messages at
            // that offset; bound the wait with the same deadline as acceptMsg.
            long deadline = System.currentTimeMillis() + CONSUME_DEADLINE_MS;
            while (msgList.isEmpty() && System.currentTimeMillis() < deadline) {
                ConsumerRecords<String, String> consumerRecords = consumer.poll(POLL_TIMEOUT_MS);
                for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                    String line = formatRecord(consumerRecord);
                    msgList.add(line);
                    System.out.println("---读到[" + parm.getTopic() + "]的消息---：" + line);
                }
            }
        }
        return new Result(msgList);
    }

    /**
     * Builds the connection/security properties shared by every endpoint:
     * security.protocol, sasl.mechanism and sasl.jaas.config are only set
     * when present; bootstrap.servers is always required.
     */
    private static Properties baseProps(String protocol, String mechanism, String config, String servers) {
        Properties props = new Properties();
        if (isNotEmpty(protocol))
            props.put("security.protocol", protocol);
        if (isNotEmpty(mechanism))
            props.put("sasl.mechanism", mechanism);
        // Password handling: PLAIN uses the clear-text password, SCRAM-SHA-512 uses its MD5 hash.
        if (isNotEmpty(config))
            props.put("sasl.jaas.config", config);
        props.put("bootstrap.servers", servers);
        return props;
    }

    /** Builds the full consumer configuration for the accept* endpoints. */
    private static Properties consumerProps(AcceptParm parm) {
        Properties props = baseProps(parm.getProtocol(), parm.getMechanism(), parm.getConfig(), parm.getServers());
        // The API allows an empty group, but Properties.put NPEs on null values —
        // fall back to a throwaway group id in that case.
        props.put("group.id", isNotEmpty(parm.getGroup())
                ? parm.getGroup()
                : "temp-group-" + System.currentTimeMillis());
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("auto.offset.reset", "earliest");
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Each client instance uses a distinct client.id (helps broker-side metrics).
        props.put("client.id", "test-cleint-" + System.currentTimeMillis());
        return props;
    }

    /** Renders one record as "value--topic:t--partition:p--offset:o" for the response payload. */
    private static String formatRecord(ConsumerRecord<String, String> record) {
        return record.value() + "--topic:" + record.topic()
                + "--partition:" + record.partition() + "--offset:" + record.offset();
    }

    private static boolean isEmpty(String str) {
        return null == str || "".equals(str);
    }

    private static boolean isNotEmpty(String str) {
        return null != str && !"".equals(str);
    }

}
