package com.audaque.springboot.foshanupload.web.flinkdemo.main;


import cn.hutool.json.JSONUtil;
import com.audaque.springboot.foshanupload.web.flinkdemo.configuration.KafkaConfig;
import com.audaque.springboot.foshanupload.web.flinkdemo.constants.KafkaGroupId;
import com.audaque.springboot.foshanupload.web.flinkdemo.constants.KafkaTopic;
import com.audaque.springboot.foshanupload.web.flinkdemo.model.bo.TemperatureFlink;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.Properties;

/**
 * @Author: gzq
 * @Date: 2021/1/31 - 01 - 31 - 22:54
 * @Description: nevt.test
 */
@Slf4j
@Component
@Slf4j
@Component
public class KafkaFlinkStreaming {

    @Resource
    private KafkaConfig kafkaConfig;

    /** Flink execution environment; created lazily by {@link #createFlinkKafka(String)}. */
    private StreamExecutionEnvironment env;

    /**
     * Creates the Flink streaming environment and a Kafka-backed source stream.
     *
     * @param topic the Kafka topic to consume from
     * @return a {@code DataStream} of raw String records read from the topic
     * @throws Exception if the environment or consumer cannot be created
     */
    public DataStream<String> createFlinkKafka(String topic) throws Exception {
        log.info("create flink Environment");
        env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // NOTE(review): EventTime is configured here, but the pipeline below windows with
        // TumblingProcessingTimeWindows, so this setting has no effect on the windowing —
        // confirm whether event-time semantics (plus watermarks) were actually intended.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        Properties props = kafkaConfig.createPropsOfConsumer(KafkaGroupId.groupId, true);

        // Read from Kafka, resuming from the committed consumer-group offsets.
        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), props);
        consumer.setStartFromGroupOffsets();
        return env.addSource(consumer);
    }

    /**
     * Builds and executes the streaming job:
     * Kafka source -> parse JSON into {@link TemperatureFlink} with {@code pres = 1} ->
     * key by id -> 5-second tumbling processing-time window summing {@code pres}
     * (i.e. a per-id record count per window) -> serialize back to JSON -> Kafka sink.
     *
     * @throws Exception if job construction or execution fails
     */
    public void run() throws Exception {
        // Source
        DataStream<String> inputDataStream = createFlinkKafka(KafkaTopic.kafkaTopicIn);

        // Transform: per-record count of 1, summed per id over a 5s tumbling window.
        SingleOutputStreamOperator<String> outputStream = inputDataStream
                .map(new MapFunction<String, TemperatureFlink>() {
                    @Override
                    public TemperatureFlink map(String value) throws Exception {
                        TemperatureFlink temperatureFlink = JSONUtil.toBean(value, TemperatureFlink.class);
                        // Each record contributes 1, so the windowed sum is a record count.
                        temperatureFlink.setPres(1);
                        return temperatureFlink;
                    }
                })
                .keyBy(TemperatureFlink::getId)
                .window(TumblingProcessingTimeWindows.of(Time.seconds(5)))
                .sum("pres")
                .map(new MapFunction<TemperatureFlink, String>() {
                    @Override
                    public String map(TemperatureFlink value) throws Exception {
                        return JSONUtil.toJsonStr(value);
                    }
                });

        // Sink: write aggregated records back to Kafka.
        // FIXME(review): bootstrap.servers is hard-coded; it should come from kafkaConfig
        // like the consumer properties do.
        Properties sinkProperties = new Properties();
        sinkProperties.setProperty("bootstrap.servers", "localhost:9092");
        FlinkKafkaProducer<String> kafkaSink = new FlinkKafkaProducer<>(
                KafkaTopic.kafkaTopicOut,
                new SimpleStringSchema(),
                sinkProperties);
        outputStream.addSink(kafkaSink);

        log.info("flink taskManager start");
        env.execute();
    }
}
