package com.gis.aoitest.kafkatest;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.Properties;

public class KafkaTransform {

    // Side-output tag for timer-driven alarm messages (shared with AlarmMessege.onTimer).
    private static OutputTag<String> sideStram;

    /**
     * Wires up the job: Kafka source -> parse to {@link Tmp} -> key by "k" ->
     * {@link AlarmMessege} -> print main/side streams and write alarms to MySQL.
     */
    public static void main(String[] args) throws Exception {
        // Execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka consumer configuration.
        Properties consumerProps = new Properties();
        consumerProps.put("bootstrap.servers", "localhost:9092");
        consumerProps.put("group.id", "kfk1");
        consumerProps.put("enable.auto.commit", "true");
        consumerProps.put("auto.commit.interval.ms", "1000");
        consumerProps.put("auto.offset.reset", "latest");
        consumerProps.put("key.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        consumerProps.put("value.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");

        // Source: consume the "kfk" topic as plain strings.
        DataStreamSource<String> source =
                env.addSource(new FlinkKafkaConsumer011<String>("kfk", new SimpleStringSchema(), consumerProps));

        // Anonymous subclass keeps the generic type information for the tag.
        sideStram = new OutputTag<String>("sideStream"){};

        // Transform: "key,value" CSV lines -> Tmp records, keyed by k, then alarm detection.
        SingleOutputStreamOperator<String> alarmStream = source
                .map(line -> {
                    String[] fields = line.split(",");
                    return new Tmp(fields[0], Integer.parseInt(fields[1]));
                })
                .keyBy("k")
                .process(new AlarmMessege());

        // Sinks: per-element alarms to stdout and MySQL; timer alarms via the side output.
        alarmStream.print("main_stream");
        alarmStream.getSideOutput(sideStram).print("side_stream");
        alarmStream.addSink(new MyKfkJdbcSink());

        // Launch the job.
        env.execute();
    }

    /**
     * Per-key alarm logic: emits a message whenever the absolute difference between
     * the current value and the previous value for the same key exceeds 10, and
     * raises a side-output alarm if that condition persists for 5 seconds.
     */
    public static class AlarmMessege extends KeyedProcessFunction<Tuple, Tmp, String>{

        // Previous value seen for the current key (null on first element).
        private ValueState<Integer> lastV;
        // Timestamp of the registered processing-time timer; null when no timer is pending.
        private ValueState<Long> timerState;

        @Override
        public void open(Configuration parameters) throws Exception {
            lastV = getRuntimeContext().getState(new ValueStateDescriptor<Integer>("last-v", Integer.class));
            timerState = getRuntimeContext().getState(new ValueStateDescriptor<Long>("timer-st", Long.class));
        }

        @Override
        public void processElement(Tmp value, KeyedProcessFunction<Tuple, Tmp, String>.Context ctx, Collector<String> out) throws Exception {
            Long timer = timerState.value();
            Integer lastValue = lastV.value();
            if (lastValue != null){
                int abs = Math.abs(lastValue - value.v);
                if (abs > 10){
                    out.collect("当前key: " + value.k + " 超过了上次的数值, (本次数值, 上次数值) -> (" + value.v + ", " + lastValue + ");");
                    if (timer == null) {
                        // BUGFIX: compute the timestamp once. The original called
                        // currentProcessingTime() twice, so the registered timer and the
                        // stored timestamp could differ by a few ms, making the
                        // deleteProcessingTimeTimer() below a no-op and firing false alarms.
                        long ts = ctx.timerService().currentProcessingTime() + 5 * 1000L;
                        ctx.timerService().registerProcessingTimeTimer(ts);
                        timerState.update(ts);
                    }
                }else{
                    // Condition cleared: cancel any pending alarm timer.
                    if (timer != null){
                        ctx.timerService().deleteProcessingTimeTimer(timer);
                        timerState.clear();
                    }
                }
            }

            lastV.update(value.v);
        }

        @Override
        public void onTimer(long timestamp, KeyedProcessFunction<Tuple, Tmp, String>.OnTimerContext ctx, Collector<String> out) throws Exception {
            // Condition held for the full 5s window: raise the side-output alarm.
            ctx.output(sideStram, ctx.getCurrentKey() + "连续5s数值差值超过10！！！");
            timerState.clear();
        }

        @Override
        public void close() throws Exception {
            // BUGFIX: intentionally empty. The original cleared keyed state here, but
            // close() runs without a key context, so ValueState.clear() would throw.
            // Flink disposes managed state itself on shutdown.
        }
    }

    /**
     * Simple POJO for one parsed Kafka record: a string key {@code k} and an
     * integer value {@code v}. The no-arg constructor and getter/setter pairs
     * keep it a valid Flink POJO so {@code keyBy("k")} can resolve the field.
     */
    public static class Tmp{
        private String k;
        private int v;

        /** Required by Flink's POJO type extraction. */
        public Tmp(){}

        public Tmp(String key, int value){
            this.k = key;
            this.v = value;
        }

        public String getK() { return k; }

        public int getV() { return v; }

        public void setK(String k) { this.k = k; }

        public void setV(int v) { this.v = v; }
    }

    /**
     * JDBC sink that inserts each alarm string into MySQL table {@code kfk_tbl}.
     * The connection and statement are created once per task in {@code open()}
     * and released in {@code close()}.
     */
    private static class MyKfkJdbcSink extends RichSinkFunction<String> {

        // Created in open(), released in close().
        Connection connection = null;
        PreparedStatement insertStmt = null;

        @Override
        public void open(Configuration parameters) throws Exception {
            // NOTE(review): credentials are hard-coded; move them to job configuration
            // or a secrets store before deploying anywhere real.
            connection = DriverManager.getConnection(
                    "jdbc:mysql://localhost:3306/lnnu", "root", "00000000");
            // Parameterized statement — safe against SQL injection.
            insertStmt = connection.prepareStatement(
                    "insert into kfk_tbl (value) values (?)"
            );
        }

        @Override
        public void invoke(String value, Context context) throws Exception {
            // One insert per incoming record.
            insertStmt.setString(1, value);
            insertStmt.execute();
        }

        @Override
        public void close() throws Exception {
            // BUGFIX: the original leaked the connection if statement close threw,
            // and NPE'd if open() had failed partway. Close each resource
            // null-safely; finally guarantees the connection is always released.
            try {
                if (insertStmt != null) {
                    insertStmt.close();
                }
            } finally {
                if (connection != null) {
                    connection.close();
                }
            }
        }
    }
}
