package com.study.utils;

import com.study.Metric;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Utility that periodically publishes randomized {@code LogEvent} records as JSON
 * to a Kafka topic, for consumption by the companion Flink job.
 *
 * @author sjw
 * @className KafkaLogUtils
 * @date 2020/11/20 8:55
 * @description: test-data generator feeding the "logEvent" topic
 */
public class KafkaLogUtils {

    public static final String broker_list = "192.168.10.200:9092";
    // Kafka topic; must stay in sync with the topic name used by the Flink program.
    public static final String topic = "logEvent";

    public static final String[] types = new String[]{"app", "web", "wechat", "h5"};
    public static final String[] levels = new String[]{"info", "warning", "error"};

    /** Utility class; not meant to be instantiated. */
    private KafkaLogUtils() {
    }

    /**
     * Creates a {@code String}/{@code String} producer connected to {@link #broker_list}.
     *
     * @return a new producer; the caller is responsible for closing it
     */
    public static KafkaProducer<String, String> getKafkaProducer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", broker_list);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); // key serializer
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); // value serializer
        return new KafkaProducer<>(props);
    }

    /**
     * Builds one pseudo-random {@code LogEvent} (fields derived from the current
     * timestamp) and publishes it as JSON to {@link #topic}.
     *
     * @param producer producer to send with; flushed but not closed by this method
     * @throws InterruptedException declared for signature compatibility with existing callers
     */
    public static void writeToKafka(KafkaProducer<String, String> producer) throws InterruptedException {
        LogEvent event = new LogEvent();
        long ts = System.currentTimeMillis();
        event.setUserId((int) (ts % 10000) + 1);
        // Index by array length (not hard-coded 4 / 3) so the value pools can
        // grow or shrink without causing ArrayIndexOutOfBoundsException.
        event.setType(types[(int) (ts % types.length)]);
        event.setLevel(levels[(int) (ts % levels.length)]);
        event.setTimestamp(ts);

        // Serialize once and reuse for both the record payload and the log line.
        String json = JsonHelper.objectToJson(event);
        producer.send(new ProducerRecord<>(topic, json));
        System.out.println("发送数据: " + json);

        producer.flush();
    }

    /**
     * Entry point: emits one event every 3 seconds, forever.
     *
     * @param args unused
     * @throws InterruptedException if the sleeping loop is interrupted
     */
    public static void main(String[] args) throws InterruptedException {
        // try-with-resources ensures the producer's buffers are flushed and its
        // network resources released if the loop ever exits (e.g. on interrupt).
        try (KafkaProducer<String, String> producer = getKafkaProducer()) {
            while (true) {
                Thread.sleep(3000);
                writeToKafka(producer);
            }
        }
    }
}
