package com.example.controller;

import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

/**
 * Demo controller that publishes test messages to a Kafka topic.
 *
 * @author zhuchanghua
 * @since 2018-09-11
 */
@RestController
@RequestMapping(value = "/kafka")
public class KafkaController {

    private static final Logger logger = LoggerFactory.getLogger(KafkaController.class);

    /** Topic every message is published to. */
    private static final String TOPIC = "newtopic";

    /**
     * Number of partitions messages are spread across at random.
     * Assumes the topic actually has at least this many partitions — TODO confirm
     * against the broker configuration; a too-large index fails the send.
     */
    private static final int PARTITION_COUNT = 3;

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Publishes one record to {@value #TOPIC} on a randomly chosen partition.
     *
     * <p>The send is asynchronous; the returned future is observed via a callback
     * so broker errors are logged instead of being silently dropped (the previous
     * implementation ignored the future entirely).
     *
     * @param key  record key
     * @param data record value
     */
    private void send(String key, String data) {
        int partitionIndex = ThreadLocalRandom.current().nextInt(PARTITION_COUNT);
        ListenableFuture<SendResult<String, String>> result =
                kafkaTemplate.send(TOPIC, partitionIndex, key, data);
        result.addCallback(
                success -> logger.debug("Sent key={} to partition {}", key, partitionIndex),
                failure -> logger.error("Failed to send key={} to partition {}",
                        key, partitionIndex, failure));
    }

    /**
     * Test endpoint: publishes five records (keys "newtopic1".."newtopic5",
     * values "datai1".."datai5") and returns {@code "success"}.
     *
     * <p>Note: {@code iMax = 6} with {@code i < iMax} yields 5 sends, not 6;
     * kept as-is to preserve the original behavior.
     *
     * @return the literal string "success"
     */
    @RequestMapping("/kafka")
    public String testKafka() {
        int iMax = 6;
        for (int i = 1; i < iMax; i++) {
            send("newtopic" + i, "datai" + i);
        }
        return "success";
    }
}
