package com.iflytek.controller;

import com.alibaba.fastjson.JSON;
import com.iflytek.bean.AverageSpeed;
import com.iflytek.bean.MonitorInfo;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.SlidingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.sql.PreparedStatement;
import java.util.Properties;

public class _02AverageSpeedMonitorController {
    public static void main(String[] args) throws Exception {
        // 1. Streaming environment: every operator runs with a single parallel instance.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Kafka source: raw JSON monitor records from the "topic-car" topic.
        Properties kafkaProps = new Properties();
        kafkaProps.setProperty("bootstrap.servers", "hadoop102:9092,hadoop103:9092,hadoop104:9092");
        kafkaProps.setProperty("group.id", "car-groop2");

        FlinkKafkaConsumer<String> kafkaSource =
                new FlinkKafkaConsumer<>("topic-car", new SimpleStringSchema(), kafkaProps);
        DataStreamSource<String> rawStream = env.addSource(kafkaSource);

        // 3. Deserialize each JSON string into a MonitorInfo bean.
        //    (Anonymous class rather than a lambda so Flink can extract the output type.)
        SingleOutputStreamOperator<MonitorInfo> monitorStream = rawStream.map(new MapFunction<String, MonitorInfo>() {
            @Override
            public MonitorInfo map(String json) throws Exception {
                return JSON.parseObject(json, MonitorInfo.class);
            }
        });

        // 4. Partition the stream by checkpoint (monitor) id.
        KeyedStream<MonitorInfo, String> byMonitor = monitorStream.keyBy(new KeySelector<MonitorInfo, String>() {
            @Override
            public String getKey(MonitorInfo info) throws Exception {
                return info.getMonitorId();
            }
        });

        // Sliding processing-time window: every 1 minute, look back over the last
        // 5 minutes and compute the average speed and vehicle count per monitor.
        SingleOutputStreamOperator<AverageSpeed> avgStream = byMonitor
                .window(SlidingProcessingTimeWindows.of(Time.minutes(5), Time.minutes(1)))
                .apply(new WindowFunction<MonitorInfo, AverageSpeed, String, TimeWindow>() {
                    @Override
                    public void apply(String monitorId, TimeWindow window, Iterable<MonitorInfo> records,
                                      Collector<AverageSpeed> out) throws Exception {
                        // Average = sum of all car speeds / number of cars in the window.
                        double speedTotal = 0;
                        int carCount = 0;
                        for (MonitorInfo record : records) {
                            speedTotal += record.getSpeed();
                            carCount++;
                        }
                        // A keyed window only fires with at least one element, so
                        // carCount is never zero here.
                        out.collect(new AverageSpeed(0, window.getStart(), window.getEnd(),
                                monitorId, speedTotal / carCount, carCount));
                    }
                });

        avgStream.print();

        // Persist the window results to MySQL; the id column is auto-generated,
        // hence the literal null in the VALUES clause (the bean's id 0 is unused).
        avgStream.addSink(JdbcSink.sink(
                "insert into t_average_speed(id,start_time,end_time,monitor_id,avg_speed,car_count) values(null,?,?,?,?,?)",
                (PreparedStatement stmt, AverageSpeed row) -> {
                    stmt.setLong(1, row.getStartTime());
                    stmt.setLong(2, row.getEndTime());
                    stmt.setString(3, row.getMonitorId());
                    stmt.setDouble(4, row.getAvgSpeed());
                    stmt.setInt(5, row.getCarCount());
                },
                JdbcExecutionOptions.builder()
                        .withBatchSize(1)
                        .withBatchIntervalMs(5000)
                        .build(),
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withUrl("jdbc:mysql://hadoop102:3306/flink_project?useSSL=false&useUnicode=true&characterEncoding=utf8")
                        .withDriverName("com.mysql.cj.jdbc.Driver")
                        .withUsername("root")
                        // NOTE(review): credentials are hard-coded; consider externalizing.
                        .withPassword("12345678")
                        .build()));

        // 5. Submit and run the job.
        env.execute();
    }
}
