package com.itzmn.tmall.sparkstream;

/*
 * @Author: 张梦楠
 * @Date: 2019/7/9 11:39
 * 简书：https://www.jianshu.com/u/d611be10d1a6
 * 码云：https://gitee.com/zhangqiye
 * @Description: sparkstreaming模块，将处理完的数据发送到kafka的realtime topic中
 */

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Small wrapper around a Kafka {@link KafkaProducer} used by the Spark Streaming
 * module to publish processed records.
 *
 * <p>The underlying {@code KafkaProducer} is thread-safe, so a single instance of
 * this class may be shared across threads. Call {@link #close()} (or use
 * try-with-resources) when done so buffered records are flushed and sockets released.
 */
public class KafkaProducerUtils implements AutoCloseable {

    /** Kafka cluster bootstrap addresses, comma-separated host:port pairs. */
    private static final String BROKERS =
            "192.168.33.3:9092,192.168.33.4:9092,192.168.33.5:9092";

    /** Shared producer; typed String/String to match the configured serializers. */
    private final KafkaProducer<String, String> producer;

    KafkaProducerUtils() {
        // Producer configuration: cluster address plus key/value serializers.
        Map<String, Object> kafkaParams = new HashMap<>();
        kafkaParams.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKERS);
        kafkaParams.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        kafkaParams.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        producer = new KafkaProducer<>(kafkaParams);
    }

    /**
     * Sends a keyed message asynchronously; the key determines partition routing.
     *
     * @param topic destination topic
     * @param key   record key used for partitioning
     * @param msg   record value
     */
    public void sendMessage(String topic, String key, String msg) {
        producer.send(new ProducerRecord<>(topic, key, msg));
    }

    /**
     * Sends an unkeyed message asynchronously; Kafka assigns the partition.
     *
     * @param topic destination topic
     * @param msg   record value
     */
    public void sendMessage(String topic, String msg) {
        producer.send(new ProducerRecord<>(topic, null, msg));
    }

    /** Flushes any buffered records and releases the producer's resources. */
    @Override
    public void close() {
        producer.flush();
        producer.close();
    }
}
