package net.sina.realtime.traffic.controller;

import com.alibaba.fastjson.JSON;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.SneakyThrows;
import net.sina.realtime.traffic.bean.MonitorInfo;
import net.sina.realtime.traffic.schema.JSONDeserializationSchema;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.time.Duration;
import java.util.Objects;
import java.util.Properties;

public class _03TaoPaiCarController {
    @Data
    @AllArgsConstructor
    @NoArgsConstructor
    public static class Violation{
        private int id;
        private String car;
        private String violation;
        private Long createTime;
    }

    public static void main(String[] args) throws Exception {

        //1. env-准备环境
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setRuntimeMode(RuntimeExecutionMode.AUTOMATIC);
        env.setParallelism(1);

        //2. source-加载数据
        //2. source-加载数据  读取kafka中的消息
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "node101:9092,node102:9092");
        properties.setProperty("group.id", "g1");
        properties.setProperty("fetch.max.bytes", "10485760"); // 10MB
        properties.setProperty("max.poll.records", "1000");
        FlinkKafkaConsumer<String> kafkaSource = new FlinkKafkaConsumer<String>("topic-car",new SimpleStringSchema(),properties);
        kafkaSource.setCommitOffsetsOnCheckpoints(true);
        DataStreamSource<String> dataStreamSource = env.addSource(kafkaSource);
        dataStreamSource.print();

        //3. transformation-数据处理转换
        // 进行转换  将数据变为一个个的对象
        DataStream<MonitorInfo> ds1 = dataStreamSource.map(new MapFunction<String, MonitorInfo>() {
            @Override
            public MonitorInfo map(String jsonStr) throws Exception {
                // 通过代码可以观察到，json中即使字段不太一致，只要 action_time  类中的字段 actionTime
                MonitorInfo speedInfo = JSON.parseObject(jsonStr, MonitorInfo.class);
                return speedInfo;

            }
        });


        SingleOutputStreamOperator<MonitorInfo> mapStream = ds1.
                assignTimestampsAndWatermarks(WatermarkStrategy
                        .<MonitorInfo>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<MonitorInfo>() {
                                    @SneakyThrows
                                    @Override
                                    public long extractTimestamp(MonitorInfo element, long recordTimestamp) {
                                        return element.getActionTime();
                                    }
                                }
                        ));

        //3. transformation-数据处理转换
        SingleOutputStreamOperator<Violation> resultDs = mapStream
                .keyBy(speedInfo -> speedInfo.getCar()).flatMap(new RichFlatMapFunction<MonitorInfo, Violation>() {

            ValueState<MonitorInfo> valueState;

            @Override
            public void open(Configuration parameters) throws Exception {
                ValueStateDescriptor<MonitorInfo> stateDescriptor = new ValueStateDescriptor<MonitorInfo>("vs1", MonitorInfo.class);
                valueState = getRuntimeContext().getState(stateDescriptor);
            }

            @Override
            public void flatMap(MonitorInfo speedInfo, Collector<Violation> collector) throws Exception {
                // 根据车牌号，获取上一个汽车的 时间，还有 摄像头的id      speedInfo.getActionTime()*1000
                MonitorInfo _speedInfo = valueState.value();
                valueState.update(speedInfo);
                if (_speedInfo != null && speedInfo.getActionTime() - _speedInfo.getActionTime() < 10 && speedInfo.getMonitorId() != _speedInfo.getMonitorId()) {
                    Violation violation = new Violation(0, speedInfo.getCar(), "涉嫌套牌", System.currentTimeMillis());
                    collector.collect(violation);
                }
            }
        });
        resultDs.print();
        //4. sink-数据输出
        resultDs.addSink(JdbcSink.sink("INSERT INTO `t_violation_list` VALUES (?,?,?)", new JdbcStatementBuilder<Violation>() {
            @Override
            public void accept(PreparedStatement ps, Violation value) throws SQLException {
                ps.setString(1,value.getCar());
                ps.setString(2,value.getViolation());
                ps.setLong(3,value.getCreateTime());
            }
        },JdbcExecutionOptions.builder().withBatchSize(1).withBatchIntervalMs(5000).build()
                ,new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withDriverName("com.mysql.cj.jdbc.Driver")
                        .withPassword("123456")
                        .withUrl("jdbc:mysql://node101:3306/flink_project")
                        .withUsername("root").build()));

        //5. execute-执行
        env.execute();
    }
}