package com.itzmn.tmall.sparkstream;

/*
 * @Author: 张梦楠
 * @Date: 2019/7/8 21:14
 * 简书：https://www.jianshu.com/u/d611be10d1a6
 * 码云：https://gitee.com/zhangqiye
 * @Description: sparkstreaming处理类，将kafka topic中的日志进一步处理，输出到另一个topic，供业务模块使用
 */

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.tools.DocumentationTool;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;

public class SparkStreamingProcesser {

    // Kafka bootstrap servers of the source cluster.
    private static final String brokers = "192.168.33.3:9092,192.168.33.4:9092,192.168.33.5:9092";
    // Consumer group id for this streaming job.
    private static final String group_id = "tmall_online";
    // Source topic(s) carrying the raw log records.
    private static final List<String> topic = new ArrayList<String>(Arrays.asList("process"));

    // Destination topic consumed by the downstream business module.
    private static final String toTopic = "realtime";

    /**
     * Entry point: consumes raw comma-separated log lines from the "process"
     * topic, extracts fields 3-5 and republishes them to the "realtime" topic
     * for the business module to use.
     */
    public static void main(String[] args) {
        // 1. Spark configuration. local[*] is a development setting; override
        //    the master when submitting to a real cluster.
        SparkConf conf = new SparkConf().setAppName("tmall_online").setMaster("local[*]");

        // 2. Streaming context: one micro-batch every 10 seconds.
        //    (Fixed: the original comment claimed 2 seconds while the code uses 10.)
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(10));

        // 3. Kafka consumer configuration.
        Map<String, Object> kafkaParams = buildKafkaParams();

        // 4. Direct stream, typed <String, String> to match the configured
        //    StringDeserializer for both key and value (the original used raw
        //    Object/Object despite deserializing strings).
        JavaInputDStream<ConsumerRecord<String, String>> stream = KafkaUtils.createDirectStream(
                jssc,
                LocationStrategies.PreferConsistent(),
                ConsumerStrategies.<String, String>Subscribe(topic, kafkaParams));

        // 5. Keep only the record value (the raw log line).
        JavaDStream<String> lines = stream.map(ConsumerRecord::value);

        // 6. Forward the interesting fields to the output topic.
        //    foreachRDD + foreachPartition is the idiomatic place for output
        //    side effects: one producer per partition instead of one per record
        //    (the original constructed a KafkaProducerUtils inside map() for
        //    every single message, and relied on print() to trigger it).
        lines.foreachRDD(rdd -> rdd.foreachPartition(records -> {
            KafkaProducerUtils producerUtils = new KafkaProducerUtils();
            while (records.hasNext()) {
                String line = records.next();
                String[] split = line.split(",");
                // Guard against malformed lines: the original indexed up to
                // split[5] unconditionally and would crash the batch on a
                // record with fewer than 6 fields.
                if (split.length < 6) {
                    System.err.println("Skipping malformed record: " + line);
                    continue;
                }
                System.out.println("接收到：" + split[1] + "," + split[2]);
                producerUtils.sendMessage(toTopic, split[3] + "," + split[4] + "," + split[5]);
            }
        }));

        jssc.start();

        try {
            jssc.awaitTermination();
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing the interruption.
            Thread.currentThread().interrupt();
        } finally {
            jssc.close();
        }
    }

    /** Builds the Kafka consumer properties used by the direct stream. */
    private static Map<String, Object> buildKafkaParams() {
        Map<String, Object> kafkaParams = new HashMap<>();
        // Kafka cluster bootstrap addresses.
        kafkaParams.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
        // Consumer group.
        kafkaParams.put(ConsumerConfig.GROUP_ID_CONFIG, group_id);
        // Key and value deserializers. (Fixed: the original comment labeled the
        // VALUE deserializer line as the "key" deserializer.)
        kafkaParams.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        kafkaParams.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return kafkaParams;
    }
}
