package cn.doitedu.log;

import cn.doitedu.pojo.LogBean;
import com.alibaba.fastjson.JSON;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Runs once and produces a single behavior-log record (JSON) to Kafka.
 *
 * Topic creation command (run on a broker host):
 *
 * /bigdata/kafka_2.12-2.6.2/bin/kafka-topics.sh --create --zookeeper node-1.51doit.cn:2181,node-2.51doit.cn:2181,node-3.51doit.cn:2181 --replication-factor 2 --partitions 4 --topic eagle-app-log
 */
public class ActionLogGenOne {

    /** Kafka topic the generated log record is written to. */
    private static final String TOPIC = "eagle-app-log";

    public static void main(String[] args) {

        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "node-1.51doit.cn:9092,node-2.51doit.cn:9092,node-3.51doit.cn:9092");
        props.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // try-with-resources guarantees the producer is closed (flushing any
        // buffered records and releasing its network threads) even if send() throws.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(props)) {

            LogBean logBean = new LogBean();
            // device ID
            logBean.setDeviceId("000053");
            // event ID
            logBean.setEventId("E");
            // event properties (an event may carry 0..n key/value attributes)
            Map<String, String> ps = new HashMap<>();
            ps.put("p1", "v1");
            ps.put("p2", "v3");
            logBean.setProperties(ps);
            logBean.setTimeStamp(System.currentTimeMillis());

            // serialize the bean to a JSON line and write it to Kafka
            String log = JSON.toJSONString(logBean);
            kafkaProducer.send(new ProducerRecord<>(TOPIC, log));
            kafkaProducer.flush();
        }
    }
}