import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;

import pojo.etcData;

import java.util.Properties;

public class KafkaToFlinkLearn {
    /**
     * Flink streaming job: consumes JSON ETC toll records from the Kafka topic
     * {@code ETC_flink}, parses each record into an {@link etcData} POJO, registers
     * the stream as a table, and runs a continuous per-exit-station vehicle count.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        // Set up the streaming environment and a Blink-planner table environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        EnvironmentSettings settings =
                EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env, settings);

        // Kafka consumer configuration.
        Properties kafkaProps = new Properties();
        kafkaProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.88.131:9092"); // broker address
        kafkaProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "flink");                        // consumer group id
        kafkaProps.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");              // no committed offset -> start at newest data
        kafkaProps.setProperty("flink.partition-discovery.interval-millis", "5000");            // discover new partitions every 5s
        kafkaProps.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");               // auto-commit offsets back to Kafka
        kafkaProps.setProperty(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "2000");          // commit interval (ms)

        // Source: raw JSON strings from the "ETC_flink" topic.
        DataStreamSource<String> kafkaStream = env.addSource(new FlinkKafkaConsumer<>(
                "ETC_flink",
                new SimpleStringSchema(),
                kafkaProps
        ));

        // Parse each JSON record into an etcData POJO and forward it downstream.
        // NOTE(review): a malformed message, or a record missing RKSJ/CKSJ, will throw
        // (getTimestamp returns null -> NPE) and fail the whole job — confirm upstream
        // data is always well-formed, or wrap the parse in a try/catch that drops/logs
        // bad records instead.
        SingleOutputStreamOperator<etcData> etcStream = kafkaStream.flatMap(
                new FlatMapFunction<String, etcData>() {
                    @Override
                    public void flatMap(String value, Collector<etcData> out) throws Exception {
                        JSONObject json = JSON.parseObject(value);
                        out.collect(new etcData(
                                json.getInteger("XH"),
                                json.getString("CP"),
                                json.getString("CX"),
                                json.getTimestamp("RKSJ").toLocalDateTime(),
                                json.getString("SFZRKMC"),
                                json.getTimestamp("CKSJ").toLocalDateTime(),
                                json.getString("SFZCKMC"),
                                json.getString("BZ")
                        ));
                    }
                });

        // Map the POJO stream to a Table and register it as a temporary view.
        Table tableA = tenv.fromDataStream(etcStream, "XH, CP, CX, RKSJ, SFZRKMC, CKSJ, SFZCKMC, BZ");
        tenv.createTemporaryView("ETC", tableA);

        // Continuous query: count of vehicles grouped by exit station name.
        Table resultTable = tenv.sqlQuery(
                "SELECT SFZCKMC, COUNT(*) as VehicleCount FROM ETC GROUP BY SFZCKMC");

        // BUG FIX: a non-windowed GROUP BY produces an *updating* table, so
        // toAppendStream would throw a TableException at runtime; convert with
        // toRetractStream instead (the Boolean flag marks insert vs. retract).
        // Also, the result schema (SFZCKMC, VehicleCount) does not match the
        // 8-field etcData POJO, so convert to a generic Row, not etcData.class.
        DataStream<Tuple2<Boolean, Row>> resultDS = tenv.toRetractStream(resultTable, Row.class);

        resultDS.print();

        // Submit and run the job.
        env.execute("Kafka to Flink Example");
    }
}
