package cn.itcast.job;


import cn.itcast.bean.CleanBean;
import cn.itcast.config.QuotConfig;
import cn.itcast.map.SseMap;
import cn.itcast.map.SzseMap;
import cn.itcast.task.*;
import cn.itcast.util.QuotUtil;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;

/**
 * 流处理模块:指数业务
 * 子业务包含:
 * 1.秒级行情-hbase
 * 2.分时行情-doris
 * 3.K线行情-doris
 * 4.分时数据备份-hdfs
 */
public class IndexStream {

    /**
     * Entry point for the index-quotation streaming job.
     *
     * Pipeline steps:
     *  1. Create the IndexStream class with a main method
     *  2. Obtain the stream execution environment
     *  3. Configure event time / parallelism
     *  4. Configure checkpointing
     *  5. Configure the restart strategy
     *  6. Integrate Kafka (with a deserialization schema)
     *  7. Filter data (timestamp and null-field checks)
     *  8. Map and union the two exchange streams
     *  9. Keep only index records
     * 10. Assign watermarks
     * 11. Run the business tasks
     * 12. Trigger execution
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to submit or execute
     */
    //1. Create the IndexStream class with a main method
    public static void main(String[] args) throws Exception {

        //2. Obtain the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //3. Configure event time / parallelism
        env.setParallelism(1);//parallelism 1 for easier testing; in production keep it equal to the Kafka partition count

//        //Checkpointing may stay disabled during development; it MUST be enabled for test and production
//        //4. Configure checkpointing
//        env.enableCheckpointing(5000L);
//        env.getCheckpointConfig().setCheckpointTimeout(60000l);
//        //checkpoint storage path
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://node1:8020/checkpoint/stock");
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);//exactly-once consistency
//        env.getCheckpointConfig().setForceUnalignedCheckpoints(false); //a failed checkpoint does not fail the job
//        //externalized checkpoints: retain the checkpoint when the job is cancelled
//        env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//
//        //5. Configure the restart strategy
//        //fixed-delay restart strategy


//        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, org.apache.flink.api.common.time.Time.seconds(5)));

        //6. Integrate Kafka (with a deserialization schema)
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", QuotConfig.config.getProperty("bootstrap.servers"));
        properties.setProperty("group.id", QuotConfig.config.getProperty("group.id"));

        //consumers for the Shanghai (sse) and Shenzhen (szse) market topics
        FlinkKafkaConsumer<String> sseKafkaConsumer = new FlinkKafkaConsumer<>(QuotConfig.config.getProperty("sse.topic"), new SimpleStringSchema(), properties);
        FlinkKafkaConsumer<String> szseKafkaConsumer = new FlinkKafkaConsumer<>(QuotConfig.config.getProperty("szse.topic"), new SimpleStringSchema(), properties);

        //consume from the earliest offset (testing only)
        sseKafkaConsumer.setStartFromEarliest();
        szseKafkaConsumer.setStartFromEarliest();
        //attach both Kafka sources
        DataStreamSource<String> sseSource = env.addSource(sseKafkaConsumer);
        //BUGFIX: was env.addSource(sseKafkaConsumer) — the SZSE topic was never
        //consumed and SSE data was read twice (duplicates in the union below)
        DataStreamSource<String> szseSource = env.addSource(szseKafkaConsumer);

        //8a. parse raw JSON strings into CleanBean per exchange
        SingleOutputStreamOperator<CleanBean> mapDataSse = sseSource.map(new SseMap());
        SingleOutputStreamOperator<CleanBean> mapDataSzse = szseSource.map(new SzseMap());


        //7. Filter data (timestamp and null-field checks)
        //sse filter
        SingleOutputStreamOperator<CleanBean> sseFilter = mapDataSse.filter(new FilterFunction<CleanBean>() {
            //keep records for which filter() returns true
            @Override
            public boolean filter(CleanBean value) throws Exception {
                return QuotUtil.checkTime(value) && QuotUtil.checkData(value);
            }
        });

        //szse filter
        SingleOutputStreamOperator<CleanBean> szseFilter = mapDataSzse.filter(new FilterFunction<CleanBean>() {
            //keep records for which filter() returns true
            @Override
            public boolean filter(CleanBean value) throws Exception {
                return QuotUtil.checkTime(value) && QuotUtil.checkData(value);
            }
        });

        //8b. union both exchanges into one stream
        DataStream<CleanBean> unionData = sseFilter.union(szseFilter);

        //9. Keep only index records (this job handles indices, not individual stocks)
        SingleOutputStreamOperator<CleanBean> stockData = unionData.filter(new FilterFunction<CleanBean>() {
            @Override
            public boolean filter(CleanBean value) throws Exception {
                return QuotUtil.isIndex(value);
            }
        });

        //10. Assign watermarks
        DataStream<CleanBean> waterData = stockData.assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<CleanBean>(Time.seconds(2)) { //max out-of-orderness
            @Override
            public long extractTimestamp(CleanBean element) {
                return element.getEventTime(); //extract the event time
            }
        });

        //11. Run the business tasks
        /**
         * Sub-tasks:
         * 1. second-level quotation  -> HBase
         * 2. minute-level quotation  -> Doris
         * 3. K-line quotation        -> Doris
         * 4. minute-level backup     -> HDFS
         */
        //1. second-level quotation -> HBase
        new IndexSecTask().process(waterData);
        //2. minute-level quotation -> Doris
        new IndexMinTask().process(waterData);
        //3. K-line quotation -> Doris
        new IndexDayKlineTask().process(waterData);
        //4. minute-level backup -> HDFS
        new IndexMinHdfsTask().process(waterData);

        //12. Trigger execution
        env.execute("index stream");
    }

}
