package com.cold.flink;

import com.alibaba.fastjson2.JSON;
import com.cold.common.domain.AlertMessage;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.LocalStreamEnvironment;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaException;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.Properties;

/**
 * Flink streaming job that processes real-time cold-chain alert messages.
 *
 * <p>Reads raw JSON records from {@link #FROM_TOPIC}, deserializes them into
 * {@code AlertMessage}, enriches them via {@code AlertMessageMap}, and writes
 * the result back out as JSON to {@link #TO_TOPIC}.
 */
public class MessageStreamingJob {

    // Source topic — fed by the data-collection service.
    public static final String FROM_TOPIC = "cold_chain_data_collection";
    // Destination topic — consumed downstream (Druid ingestion).
    public static final String TO_TOPIC = "cold_chain_data_druid";

    public static void main(String[] args) throws Exception {
        // Execution environment: createLocalEnvironment() for local runs,
        // getExecutionEnvironment() when deploying to a cluster.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
        // Take a checkpoint every 5 seconds for fault tolerance.
        env.enableCheckpointing(5000);
        // Checkpointing mode (DEFAULT_MODE is exactly-once).
        env.getCheckpointConfig().setCheckpointingMode(CheckpointConfig.DEFAULT_MODE);
        // On failure, restart up to 4 times with a 1-second delay between attempts.
        env.getConfig().setRestartStrategy(RestartStrategies.fixedDelayRestart(4, 1000));
        // Kafka client properties (shared here by both consumer and producer).
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "192.168.101.222:9092");
        properties.setProperty("group.id", "coldflink");
        properties.setProperty("auto.offset.reset", "earliest");
        // Flink Kafka source reading raw JSON strings from the collection topic.
        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<>(FROM_TOPIC, new SimpleStringSchema(), properties);
        // NOTE(review): setStartFromLatest() overrides the "auto.offset.reset=earliest"
        // property set above — confirm which start position is actually intended.
        consumer.setStartFromLatest();
        // Pipeline: JSON string --> AlertMessage --> enriched alert message.
        DataStream<AlertMessage> stream = env.addSource(consumer)
                .setParallelism(1)
                .map(msg -> JSON.parseObject(msg, AlertMessage.class))
                .map(new AlertMessageMap());
        // Kafka sink that publishes the processed records to the next topic.
        FlinkKafkaProducer<String> producer = new FlinkKafkaProducer<>(TO_TOPIC, new SimpleStringSchema(), properties);
        // Serialize each enriched message back to JSON and attach the sink.
        stream.map(alertMessage -> JSON.toJSONString(alertMessage)).addSink(producer);
        // BUG FIX: removed producer.close() — Flink manages the sink function's
        // lifecycle itself; closing it manually before execute() releases the
        // producer's resources client-side before the job has even started.
        env.execute("冷链物流实时监控任务2.0");
    }
}
