package mockdata;

import pojo.LogBean;
import com.alibaba.fastjson.JSON;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.HashMap;
import java.util.Properties;

/**
 * Mock action-log generator: fills a {@link pojo.LogBean} with randomized
 * values for the fields below and publishes the JSON form to Kafka.
 *
 * Fields populated per event:
 *   appId, appVersion, carrier, deviceId, deviceType, ip,
 *   latitude, longitude, netType, osName, osVersion,
 *   releaseChannel, resolution, sessionId, timeStamp, eventId,
 *   properties (HashMap&lt;String, String&gt;)
 */

public class ActionLogGen {

    private static final String BOOTSTRAP_SERVERS = "master3:9092,master4:9092,master5:9092";
    private static final String TOPIC = "app_log";

    /**
     * Entry point: starts two producer threads, each sending one randomly
     * generated action-log JSON message to Kafka every 1-2 seconds until
     * the thread is interrupted.
     */
    public static void main(String[] args) throws InterruptedException {
        for (int i = 0; i < 2; i++) {
            new Thread(ActionLogGen::produceForever).start();
        }
    }

    /**
     * Creates a producer and sends randomized log events in a loop.
     * Stops cleanly (and closes the producer) once the thread is interrupted.
     */
    private static void produceForever() {
        // Renamed from "KafkaProducer": a local variable must not shadow the class name.
        KafkaProducer<String, String> producer = new KafkaProducer<>(buildProducerConfig());
        try {
            while (!Thread.currentThread().isInterrupted()) {
                String message = JSON.toJSONString(randomLogBean());
                System.out.println(message);
                producer.send(new ProducerRecord<>(TOPIC, message));
                try {
                    Thread.sleep(RandomUtils.nextLong(1000, 2000));
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so the while-condition sees it and the
                    // loop terminates, instead of swallowing the interruption.
                    Thread.currentThread().interrupt();
                }
            }
        } finally {
            producer.close(); // flush buffered records and release network resources
        }
    }

    /**
     * Builds the Kafka producer configuration.
     *
     * @return properties with string serializers, acks=all, and small batches
     */
    private static Properties buildProducerConfig() {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", BOOTSTRAP_SERVERS);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("acks", "all");
        // 1 KB instead of the 16 KB default: batches fill (and are sent) sooner,
        // reducing send latency at some cost to throughput.
        props.put("batch.size", 1024);
        // Wait at most 1 ms for a batch to fill before sending; a batch is sent as
        // soon as it is full OR this delay elapses, slightly improving throughput.
        props.put("linger.ms", 1);
        return props;
    }

    /**
     * Builds one fully populated log event with a random two-digit account,
     * random location, random single-letter event id, and 1-4 random properties.
     *
     * @return a new {@code LogBean} ready to be serialized
     */
    private static LogBean randomLogBean() {
        LogBean logBean = new LogBean();
        // Zero-padded two-digit account, e.g. "07" or "98".
        String account = StringUtils.leftPad(RandomUtils.nextInt(1, 100) + "", 2, "0");
        logBean.setAccount(account);
        logBean.setAppId("cn.doitedu.yinew");
        logBean.setAppVersion("2.5");
        logBean.setCarrier("中国移动");

        // TODO: deviceId reuses the account so it matches the two-digit rowkey used elsewhere.
        logBean.setDeviceId(account);

        logBean.setIp("10.102.36.88");
        logBean.setLatitude(RandomUtils.nextDouble(10.0, 52.0));
        logBean.setLongitude(RandomUtils.nextDouble(120.0, 160.0));
        logBean.setDeviceType("mi6");
        logBean.setNetType("5G");
        logBean.setOsName("android");
        logBean.setOsVersion("7.5");
        logBean.setReleaseChannel("小米应用市场");
        logBean.setResolution("2048*1024");
        logBean.setEventId(RandomStringUtils.randomAlphabetic(1).toUpperCase());

        // 1-4 random properties such as p1=v3, p2=v5, ...
        // The original evaluated RandomUtils.nextInt(1, 5) in the loop CONDITION, so the
        // bound changed on every iteration; hoist it so the count is drawn exactly once.
        int propertyCount = RandomUtils.nextInt(1, 5);
        HashMap<String, String> properties = new HashMap<>();
        for (int i = 0; i < propertyCount; i++) {
            properties.put("p" + RandomUtils.nextInt(1, 10), "v" + RandomUtils.nextInt(1, 10));
        }
        logBean.setProperties(properties);
        return logBean;
    }
}
