package com.itheima.cold.flink;


import com.alibaba.fastjson.JSON;
import com.itheima.cold.common.netty.entity.MessageEntity;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Properties;

/**
 * Real-time cold-chain device monitoring job.
 *
 * <p>Pipeline: Kafka (raw device JSON produced by the netty gateway)
 * -> parse into {@link MessageEntity}
 * -> enrich with threshold data via {@code MessageAggregate}
 * -> sink to MySQL, and republish as JSON to a Kafka topic consumed by Druid.
 */
public class StreamingJob {
    public static void main(String[] args) throws Exception {
        // Topic names: source topic (written by the netty gateway) and
        // sink topic (ingested by Druid).
        String kafkaTopic = "device_all_topic";
        String druidTopic = "all_device_message";

        // Set up the streaming execution environment.
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Fault tolerance: checkpoint every 5 seconds.
        env.enableCheckpointing(5000);
        // Checkpointing mode (DEFAULT_MODE is exactly-once).
        env.getCheckpointConfig().setCheckpointingMode(CheckpointConfig.DEFAULT_MODE);
        // Restart strategy: at most 4 attempts, 1 second between attempts.
        env.getConfig().setRestartStrategy(RestartStrategies.fixedDelayRestart(4, 1000));

        // Kafka connection properties.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "192.168.1.15:9092");
        // FIX: the Kafka consumer-group property key is "group.id", not "group_id";
        // the misspelled key was silently ignored and no group id was ever set.
        properties.setProperty("group.id", "coldflink");
        properties.setProperty("auto.offset.reset", "earliest");

        // Kafka consumer receiving the messages pushed by the netty gateway.
        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>(kafkaTopic, new SimpleStringSchema(), properties);
        consumer.setStartFromEarliest();

        // Source: parse each raw JSON string into a MessageEntity.
        DataStream<MessageEntity> stream = env.addSource(consumer)
                .setParallelism(1)
                .map(json -> JSON.parseObject(json, MessageEntity.class));

        // Merge threshold data fetched from Redis with the Kafka stream.
        DataStream<MessageEntity> outStream = stream.map(new MessageAggregate());

        // Persist the enriched messages to MySQL.
        outStream.addSink(new SinkToMySQL());

        // Republish the enriched messages as JSON to Kafka (consumed by Druid).
        FlinkKafkaProducer<String> myProducer =
                new FlinkKafkaProducer<>(druidTopic, new SimpleStringSchema(), properties);
        outStream.map(new MapFunction<MessageEntity, String>() {
            @Override
            public String map(MessageEntity messageEntity) throws Exception {
                return JSON.toJSONString(messageEntity);
            }
        }).addSink(myProducer);
        // FIX: removed the manual myProducer.close() that ran before execute().
        // The sink's open/close lifecycle is managed by the Flink runtime;
        // closing it while building the job graph is incorrect.

        // Submit and run the job.
        env.execute("冷链设备实时监控");
    }
}
