package cn.itcast.job;

import cn.itcast.avro.SseAvro;
import cn.itcast.avro.SzseAvro;
import cn.itcast.bean.CleanBean;
import cn.itcast.config.QuotConfig;
import cn.itcast.map.SseMap;
import cn.itcast.map.SzseMap;
import cn.itcast.task.*;
import cn.itcast.util.QuotUtil;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;


import cn.itcast.avro.AvroDeserializerSchema;

import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;

import javax.annotation.Nullable;
import java.util.Properties;

// Real-time stock quotation job: consumes SSE/SZSE market data from Kafka and drives the quote tasks.
/**
 * Entry point for the real-time stock quotation stream.
 *
 * <p>Pipeline: consume SSE (Shanghai) and SZSE (Shenzhen) Avro market feeds
 * from Kafka, filter out invalid records, map both feeds to a common
 * {@code CleanBean}, union them, keep stock records only, assign event-time
 * watermarks, and hand the stream to the per-second and price-change tasks.
 */
public class StockStream {

    public static void main(String[] args) {
        // 1. Stream execution environment: single parallelism, event time.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Checkpointing and the restart strategy are currently disabled.
        // Re-enable the block below for production fault tolerance:
        // env.enableCheckpointing(5000);
        // env.setStateBackend(new FsStateBackend("hdfs://node01:8020/checkpoint/stock"));
        // env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        // env.getCheckpointConfig().setCheckpointTimeout(60000);
        // env.getCheckpointConfig().setFailOnCheckpointingErrors(false);
        // env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        // env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.DELETE_ON_CANCELLATION);
        // env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, Time.seconds(5)));

        // 2. Shared Kafka consumer configuration.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", QuotConfig.config.getProperty("bootstrap.servers"));
        properties.setProperty("group.id", QuotConfig.config.getProperty("group.id"));

        // 3. Kafka sources. SSE = Shanghai Stock Exchange feed.
        // Type argument supplied so the deserializer is no longer a raw type.
        FlinkKafkaConsumer011<SseAvro> sseKafkaConsumer = new FlinkKafkaConsumer011<SseAvro>(
                QuotConfig.config.getProperty("sse.topic"),
                new AvroDeserializerSchema<SseAvro>(QuotConfig.config.getProperty("sse.topic")),
                properties);

        // SZSE = Shenzhen Stock Exchange feed.
        FlinkKafkaConsumer011<SzseAvro> szseSource = new FlinkKafkaConsumer011<SzseAvro>(
                QuotConfig.config.getProperty("szse.topic"),
                new AvroDeserializerSchema<SzseAvro>(QuotConfig.config.getProperty("szse.topic")),
                properties);

        // 4. Start from the earliest offset so a fresh job replays history.
        sseKafkaConsumer.setStartFromEarliest();
        szseSource.setStartFromEarliest();

        // 5. Attach the sources to the environment.
        DataStreamSource<SseAvro> sseValue = env.addSource(sseKafkaConsumer);
        DataStreamSource<SzseAvro> szseValue = env.addSource(szseSource);

        // 6. Drop records with an invalid trade time or malformed payload.
        SingleOutputStreamOperator<SseAvro> sseFilterData = sseValue.filter(new FilterFunction<SseAvro>() {
            @Override
            public boolean filter(SseAvro value) throws Exception {
                return QuotUtil.checkTime(value) && QuotUtil.checkData(value);
            }
        });
        SingleOutputStreamOperator<SzseAvro> szseFilterData = szseValue.filter(new FilterFunction<SzseAvro>() {
            @Override
            public boolean filter(SzseAvro value) throws Exception {
                return QuotUtil.checkTime(value) && QuotUtil.checkData(value);
            }
        });

        // 7. Normalize both exchange formats to CleanBean and merge the feeds.
        SingleOutputStreamOperator<CleanBean> sseCleanData = sseFilterData.map(new SseMap());
        SingleOutputStreamOperator<CleanBean> szseCleanData = szseFilterData.map(new SzseMap());
        DataStream<CleanBean> union = sseCleanData.union(szseCleanData);

        // 8. Keep stock records only (the feeds also carry indices/bonds/funds
        //    — presumably; verify against QuotUtil.isStock).
        SingleOutputStreamOperator<CleanBean> filterData = union.filter(new FilterFunction<CleanBean>() {
            @Override
            public boolean filter(CleanBean value) throws Exception {
                return QuotUtil.isStock(value);
            }
        });

        // 9. Assign event-time watermarks: watermark trails the max observed
        //    event time by the configured delay to tolerate out-of-order data.
        DataStream<CleanBean> waterData = filterData.assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarks<CleanBean>() {
            // Maximum tolerated out-of-orderness, in the event-time unit.
            final long delayTime = Long.parseLong(QuotConfig.config.getProperty("delay.time"));
            // Highest event timestamp seen so far.
            long currentTimestamp = 0L;

            @Nullable
            @Override
            public Watermark getCurrentWatermark() {
                return new Watermark(currentTimestamp - delayTime);
            }

            @Override
            public long extractTimestamp(CleanBean element, long previousElementTimestamp) {
                // Track the maximum event time; late records do not move it back.
                long eventTime = element.getEventTime();
                currentTimestamp = Math.max(currentTimestamp, eventTime);
                return eventTime;
            }
        });

        /*
         * Downstream processing tasks:
         *  1. per-second quotes     2. per-minute quotes (disabled)
         *  3. HDFS backup (disabled)  4. price change ranking  5. K-line (not wired)
         */
        // 1. Per-second quotes.
        new StockSecTask().process(waterData);
        // 2. Per-minute quotes (disabled).
        // new StockMinTask().process(waterData);
        // 3. Historical data backup to HDFS (disabled).
        // new StockHdfsTask().process(waterData);
        // 4. Price change (increase/decrease) ranking.
        new StockQuotIncrTask().process(waterData);

        // 10. Trigger execution; propagate failures instead of swallowing them
        //     so the job exits non-zero and the cause is preserved.
        try {
            env.execute("stock stream");
        } catch (Exception e) {
            throw new RuntimeException("stock stream job failed", e);
        }
    }
}
