package com.study.utils;

import com.study.Metric;
import com.study.entity.UserBehavior;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Utilities for generating pseudo-random {@code UserBehavior} events and
 * publishing them to Kafka as JSON strings, for consumption by the matching
 * Flink job.
 *
 * @author sjw
 * @date 2020/11/20 8:55
 */
public class KafkaUserBehaviorUtils {

    /** Kafka bootstrap server address. */
    public static final String broker_list = "192.168.10.200:9092";
    /** Kafka topic; the Flink consumer must use the same topic name. */
    public static final String topic = "logEvent";

    /** Behavior types cycled through when generating events. */
    public static final String[] behaviors = new String[]{"pv", "buy", "cart", "fav"};

    /** Utility class — not meant to be instantiated. */
    private KafkaUserBehaviorUtils() {
    }

    /**
     * Creates a String/String producer connected to {@link #broker_list}.
     *
     * @return a configured producer; the caller is responsible for closing it
     */
    public static KafkaProducer<String, String> getKafkaProducer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", broker_list);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        return new KafkaProducer<>(props);
    }

    /**
     * Sends one pseudo-random {@code UserBehavior} event whose user id and
     * event timestamp are both derived from the current wall clock.
     *
     * @param producer producer to send with; flushed after the send
     * @throws InterruptedException declared for caller compatibility (never thrown here)
     */
    public static void writeToKafka(KafkaProducer<String, String> producer) throws InterruptedException {
        long ts = System.currentTimeMillis();
        UserBehavior userBehavior = new UserBehavior();
        // Fields are derived from the millisecond clock so successive calls vary.
        userBehavior.setUserId((int) (ts % 1000) + 1);
        userBehavior.setCategoryId((int) (ts % 40));
        userBehavior.setItemId(ts % 1000);
        userBehavior.setBehavior(behaviors[(int) (ts % 4)]);
        userBehavior.setTimestamp(ts);
        send(producer, userBehavior);
    }

    /**
     * Sends one {@code UserBehavior} event with a fixed user id (1) and an
     * explicitly supplied event timestamp; the remaining fields are derived
     * from the current wall clock.
     *
     * @param producer  producer to send with; flushed after the send
     * @param timestamp event timestamp to stamp on the record
     * @throws InterruptedException declared for caller compatibility (never thrown here)
     */
    public static void writeToKafka(KafkaProducer<String, String> producer, long timestamp) throws InterruptedException {
        long ts = System.currentTimeMillis();
        UserBehavior userBehavior = new UserBehavior();
        userBehavior.setUserId(1); // fixed user id; clock-based variant left disabled on purpose
        userBehavior.setCategoryId((int) (ts % 40));
        userBehavior.setItemId(ts % 1000);
        userBehavior.setBehavior(behaviors[(int) (ts % 4)]);
        userBehavior.setTimestamp(timestamp);
        send(producer, userBehavior);
    }

    /**
     * Serializes the event once, sends it keyless to {@link #topic}, logs it,
     * and flushes the producer. Shared by both {@code writeToKafka} overloads.
     */
    private static void send(KafkaProducer<String, String> producer, UserBehavior userBehavior) {
        String json = JsonHelper.objectToJson(userBehavior);
        // null partition / null key: let Kafka pick the partition.
        producer.send(new ProducerRecord<>(topic, null, null, json));
        System.out.println("发送数据: " + json);
        producer.flush();
    }

    /**
     * Emits one event roughly every 300 ms, effectively forever.
     */
    public static void main(String[] args) throws InterruptedException {
        KafkaProducer<String, String> producer = getKafkaProducer();
        for (int i = 0; i < 100000000; i++) {
            Thread.sleep(300);
            writeToKafka(producer);
        }
    }
}
