package day4;

import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * Flink streaming job: consumes comma-separated change records from the Kafka topic
 * {@code ChangeRecord}, keys them by machine id, and counts consecutive warning
 * ("预警") statuses per machine. Once a machine accumulates 30 consecutive warnings,
 * its id is emitted downstream and written to Redis via {@code RedisSink2}.
 *
 * <p>Assumed record layout (comma-separated) — TODO confirm against the producer:
 * field 0 = event timestamp in epoch millis, field 1 = machine id, field 2 = status.
 */
public class job2 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(1);
        env.enableCheckpointing(5000);

        // Kafka source reading the raw CSV lines from the start of the topic.
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("bigdata1:9092")
                .setTopics("ChangeRecord")
                .setGroupId("day4 job2")
                .setStartingOffsets(OffsetsInitializer.earliest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        // Redis connection pool for the sink at the end of the pipeline.
        FlinkJedisPoolConfig bigdata1 = new FlinkJedisPoolConfig.Builder()
                .setHost("bigdata1")
                .setPort(6379)
                .setPassword("123456")
                .build();

        // Event-time watermarks: field 0 carries the record's epoch-millis timestamp;
        // tolerate up to 5 seconds of out-of-order arrival.
        WatermarkStrategy<String> watermarkStrategy = WatermarkStrategy
                .<String>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                .withTimestampAssigner(new SerializableTimestampAssigner<String>() {
                    @Override
                    public long extractTimestamp(String s, long l) {
                        String[] strings = s.split(",");
                        return Long.parseLong(strings[0]);
                    }
                });

        SingleOutputStreamOperator<String> process = env.fromSource(source, watermarkStrategy, "source job2")
                .map(s -> s.split(","))
                .keyBy(s -> s[1])
                // BUG FIX: the job assigns event-time timestamps/watermarks above, so it
                // must use event-time windows; TumblingProcessingTimeWindows silently
                // ignored the watermark strategy.
                .window(TumblingEventTimeWindows.of(Time.seconds(30)))
                .process(new ProcessWindowFunction<String[], String, String, TimeWindow>() {
                    // Per-key count of consecutive warning records. Keyed state survives
                    // across windows, so a streak can span window boundaries.
                    ValueState<Integer> count;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        count = getRuntimeContext().getState(new ValueStateDescriptor<Integer>("count", Types.INT));
                    }

                    @Override
                    public void process(String s, ProcessWindowFunction<String[], String, String, TimeWindow>.Context context, Iterable<String[]> iterable, Collector<String> collector) throws Exception {
                        for (String[] strings : iterable) {
                            // BUG FIX: ValueState.value() is null before the first update
                            // for a key; the original "count.value() + 1" threw a
                            // NullPointerException on auto-unboxing.
                            int current = count.value() == null ? 0 : count.value();
                            if (strings[2].equals("预警")) {
                                current++;
                            } else {
                                current = 0; // non-warning status breaks the streak
                            }
                            if (current >= 30) {
                                // 30 consecutive warnings: emit the machine id, reset streak.
                                collector.collect(strings[1]);
                                current = 0;
                            }
                            count.update(current);
                        }
                    }
                });

        process.addSink(new RedisSink<>(bigdata1 , new RedisSink2()));

        env.execute();
    }
}
