package cn.itcast.streaming.task;

import cn.itcast.streaming.async.AsyncHttpQueryFunction;
import cn.itcast.streaming.entity.ItcastDataPartObj;
import cn.itcast.streaming.entity.OnlineDataObj;
import cn.itcast.streaming.entity.VehicleInfoModel;
import cn.itcast.streaming.flatmap.VehicleInfoMapMysqlFunction;
import cn.itcast.streaming.map.LocationInfoRedisFunction;
import cn.itcast.streaming.sink.OnlineStatisticsMysqlSink;
import cn.itcast.streaming.source.VehicleInfoMysqlSource;
import cn.itcast.streaming.utils.JsonParsePartUtil;
import cn.itcast.streaming.window.function.OnlineStatisticsWindowFunction;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.concurrent.TimeUnit;

/**
 * Author itcast
 * Date 2021/9/27 10:16
 * Desc 实现车辆的实时上报故障诊断业务分析
 * 1.读取车辆的数据，将json字符串转换成对象
 * 2.读取出来正确的数据
 * 3.将车辆的数据通过地理位置（经度纬度）去redis中拉取
 * 3.1.如果拉去数据成功，直接封装对象
 * 3.2.如果拉去省市区地理位置失败，异步数据流读取高德Api请求地理位置并将数据保存到redis中
 * 4.将从redis和高德Api拉宽的数据进行合并处理
 * 5.使用窗口操作，比如30s统计一些窗口内的故障告警对象返回
 * 6.读取mysql数据库中的车辆静态数据，车辆车型车系、销售时间等
 * 7.窗口数据和静态数据进行connect并flatMap，拉宽数据
 * 8.将数据写入到mysql中
 * 9.执行任务流环境
 */
/**
 * Real-time vehicle online/fault statistics job.
 *
 * <p>Pipeline: consume vehicle telemetry JSON from Kafka, parse and filter out
 * malformed records, enrich each record with province/city via Redis (falling
 * back to an async AMap reverse-geocoding lookup on cache miss), window the
 * enriched stream per VIN, derive alarm statistics, join against broadcast
 * vehicle master data from MySQL, and sink the widened result back to MySQL.
 */
public class OnlineStatisticsTask extends BaseTask {
    private static final Logger logger = LoggerFactory.getLogger(OnlineStatisticsTask.class);

    public static void main(String[] args) throws Exception {
        // 1) Initialize the Flink streaming environment (event time, checkpointing,
        //    HDFS user name) via the shared BaseTask helper.
        StreamExecutionEnvironment env = getEnv(OnlineStatisticsTask.class.getSimpleName());
        // 2) Attach the Kafka source and consume raw telemetry strings.
        DataStreamSource<String> kafkaStream = getKafkaStream(env, "__consumer_online_alarm_analysis_", SimpleStringSchema.class);
        // 3) Parse each JSON string into an ItcastDataPartObj.
        DataStream<ItcastDataPartObj> source = kafkaStream.map(
                JsonParsePartUtil::parseJsonToObject
        )
                // 4) Keep only records that parsed cleanly (no error payload).
                .filter(obj -> StringUtils.isEmpty(obj.getErrorData()));
        // 5) Enrich with location data cached in Redis, keyed by geohash
        //    (<geohash, VehicleLocationModel>).
        SingleOutputStreamOperator<ItcastDataPartObj> itcastDataMapStream = source
                .map(new LocationInfoRedisFunction());
        // 6) Records the Redis lookup successfully enriched (province present).
        SingleOutputStreamOperator<ItcastDataPartObj> withLocationStream = itcastDataMapStream
                .filter(obj -> StringUtils.isNotEmpty(obj.getProvince()));
        // 7) Records the Redis lookup missed (province absent).
        SingleOutputStreamOperator<ItcastDataPartObj> noWithLocationStream = itcastDataMapStream
                .filter(obj -> StringUtils.isEmpty(obj.getProvince()));
        // 8) For cache misses, resolve the location through the AMap reverse-geocoding
        //    HTTP API with async I/O (results are written back to Redis by the function).
        //    NOTE: the timeout unit was MICROSECONDS (3000 µs = 3 ms), which would
        //    time out virtually every HTTP call; the intended timeout is 3 seconds.
        SingleOutputStreamOperator<ItcastDataPartObj> withLocationAsyncStream = AsyncDataStream.unorderedWait(
                noWithLocationStream,
                new AsyncHttpQueryFunction(),
                3000,
                TimeUnit.MILLISECONDS
        );

        // 9) Merge the Redis-enriched and AMap-enriched streams.
        WindowedStream<ItcastDataPartObj, String, TimeWindow> windowStream = withLocationStream
                .union(withLocationAsyncStream)
                // 10) Assign event-time watermarks (3 s allowed lateness), key by VIN,
                //     and open a 30 s tumbling event-time window.
                .assignTimestampsAndWatermarks(
                        new BoundedOutOfOrdernessTimestampExtractor<ItcastDataPartObj>(Time.seconds(3)) {
                            @Override
                            public long extractTimestamp(ItcastDataPartObj element) {
                                return element.getTerminalTimeStamp();
                            }
                        }
                ).keyBy(ItcastDataPartObj::getVin)
                .window(TumblingEventTimeWindows.of(Time.seconds(30)));
        // 11) Analyze each window for faults, separating alarm and non-alarm records
        //     (19 alarm fields).
        DataStream<OnlineDataObj> onlineStatisticsStream = windowStream
                .apply(new OnlineStatisticsWindowFunction());
        // 12) Load vehicle master data from MySQL (vehicle, vehicle type, sales
        //     record, vehicle usage tables, etc.) and broadcast it.
        DataStream<HashMap<String, VehicleInfoModel>> vehicleInfoBrocastStream = env
                .addSource(new VehicleInfoMysqlSource()).broadcast();
        // 13) Connect the windowed stream with the broadcast master data and widen.
        SingleOutputStreamOperator<OnlineDataObj> result = onlineStatisticsStream.connect(vehicleInfoBrocastStream)
                .flatMap(new VehicleInfoMapMysqlFunction());
        // 14) Persist the widened result to MySQL.
        result.addSink(new OnlineStatisticsMysqlSink());
        // 15) Submit the job, naming it after this class for easier identification
        //     in the Flink UI.
        env.execute(OnlineStatisticsTask.class.getSimpleName());
    }
}
