package cn.nju.cloudcomputing.streaming;

import cn.nju.cloudcomputing.db.BeanListHandler;
import cn.nju.cloudcomputing.db.CRUDTemplate;
import cn.nju.cloudcomputing.db.RespInfo;
import org.apache.commons.collections.CollectionUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.stream.Collectors;

/**
 * @BelongsProject: SparkProject
 * @BelongsPackage: cn.nju.cloudcomputing.streaming
 * @Author: shadowbynl
 * @CreateTime: 2022-11-16  19:49
 * @Description: Simulates a real-time GitHub event stream by paging rows out of
 *               the repo_info table and publishing them to a Kafka topic in
 *               timed batches.
 * @Version: 1.0
 */
public class MockGithubDataRealTime {
    private static final String SERVER_URL = "172.19.240.61:9092,172.19.240.168:9092,172.19.240.191:9092";
//    private static final String SERVER_URL = "spark01:9092,spark02:9092,spark03:9092";

    /** Number of rows fetched from the database (and sent to Kafka) per batch. */
    private static final int BATCH_SIZE = 10000;
    /** Pause between batches, in milliseconds, to simulate a real-time feed. */
    private static final long BATCH_INTERVAL_MS = 20000L;

    public static void main(String[] arg) throws InterruptedException {
        String topic = "test-2";
        // try-with-resources guarantees the producer is flushed and closed on
        // exit; otherwise records still buffered client-side could be lost.
        try (KafkaProducer<String, String> producer = createKafkaProducer()) {
            mockRealTimeData(producer, topic);
        }
    }

    /**
     * Builds a String/String Kafka producer pointed at {@link #SERVER_URL}.
     *
     * @return a configured producer; the caller is responsible for closing it
     */
    public static KafkaProducer<String, String> createKafkaProducer() {
        // Properties object holding the Kafka client configuration.
        Properties props = new Properties();

        // Broker bootstrap addresses.
        props.put("bootstrap.servers",
               SERVER_URL);// "spark01:9092,spark02:9092,spark03:9092"
        // Serializer for record keys.
        props.put("key.serializer",
                "org.apache.kafka.common.serialization.StringSerializer");
        // Serializer for record values.
        props.put("value.serializer",
                "org.apache.kafka.common.serialization.StringSerializer");
        // "acks=all" would make the leader wait for in-sync followers before
        // acknowledging; the client default (acks=1) is deliberately kept here.
//        props.put("acks", "all");
        return new KafkaProducer<>(props);
    }

    /**
     * Streams repo_info rows to Kafka in pages of {@value #BATCH_SIZE},
     * sleeping {@value #BATCH_INTERVAL_MS} ms between pages to mimic
     * real-time data. Terminates when the table is exhausted.
     *
     * @param producer producer used to send the CSV-formatted records
     * @param topic    destination Kafka topic
     * @throws InterruptedException if the inter-batch sleep is interrupted
     */
    public static void mockRealTimeData(KafkaProducer<String, String> producer, String topic)
            throws InterruptedException {
        int offset = 0;
        while (true) {
            String sql = String.format("select * from repo_info limit %d, %d", offset, BATCH_SIZE);
            List<RespInfo> respInfoList = CRUDTemplate.executeQuery(sql, new BeanListHandler<>(RespInfo.class));
            offset += BATCH_SIZE;
            if (CollectionUtils.isEmpty(respInfoList)) {
                break;
            }
            // 1-based batch number (offset was already advanced above).
            int batchNo = offset / BATCH_SIZE;
            // Plain loop instead of the original peek()/collect() pipelines:
            // Stream.peek is a debugging aid whose side effects are not
            // guaranteed to execute, and building a throwaway list just to
            // iterate it again was unnecessary.
            for (RespInfo e : respInfoList) {
                String mockData = String.format("%d,%s,%s,%s,%d,%d,%d,%d,%s",
                        batchNo, e.getUser_name(), e.getRepo_name(), e.getLanguage(),
                        e.getFork_count(), e.getStar_count(), e.getIssue_count(), e.getSize(), e.getLicense());
                producer.send(new ProducerRecord<>(topic, mockData));
            }

            System.out.println(String.format("批次%d发送成功", batchNo));
            Thread.sleep(BATCH_INTERVAL_MS);
        }

    }
}
