import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import java.nio.charset.StandardCharsets;
import java.util.Properties;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import pojo.etcData;


/**
 * Streaming job that reads JSON records from the Kafka topic {@code ETC_flink},
 * parses each record into an {@link etcData} POJO, registers the stream as the
 * table {@code ETC}, and continuously prints a {@code COUNT(*)} aggregate.
 */
public class KafkaToFlinkLearnold {
    public static void main(String[] args) throws Exception {
        // Create the streaming environment and a Blink-planner table environment on top of it.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env, settings);

        // Kafka consumer configuration.
        Properties kafkaProps = new Properties();
        kafkaProps.setProperty("bootstrap.servers", "192.168.88.131:9092"); // broker address
        kafkaProps.setProperty("group.id", "flink");                        // consumer group id
        kafkaProps.setProperty("auto.offset.reset", "latest");              // no committed offset -> start at the newest data
        // FIX: Flink's partition-discovery key uses dots, not hyphens. The previous
        // key "flink-partition-discovery-interval-millis" was silently ignored, so
        // newly added partitions were never picked up.
        kafkaProps.setProperty("flink.partition-discovery.interval-millis", "5000");
        kafkaProps.setProperty("enable.auto.commit", "true");               // commit offsets back to Kafka (no checkpointing configured here)
        kafkaProps.setProperty("auto.commit.interval.ms", "2000");          // auto-commit interval

        // Read raw JSON strings from the Kafka topic.
        DataStreamSource<String> kafkaStream = env.addSource(new FlinkKafkaConsumer<String>(
                "ETC_flink",
                new KafkaDeserializationSchema<String>() {
                    @Override
                    public boolean isEndOfStream(String nextElement) {
                        return false; // unbounded stream: never ends
                    }

                    @Override
                    public String deserialize(ConsumerRecord<byte[], byte[]> record) throws Exception {
                        if (record == null || record.value() == null) {
                            return null; // tombstone / empty record
                        }
                        // FIX: decode explicitly as UTF-8 instead of relying on the
                        // JVM's platform-default charset.
                        return new String(record.value(), StandardCharsets.UTF_8);
                    }

                    @Override
                    public TypeInformation<String> getProducedType() {
                        return BasicTypeInfo.STRING_TYPE_INFO;
                    }
                },
                kafkaProps
        ));

        // Parse each JSON string into an etcData POJO. Null values (emitted by the
        // deserializer for empty records) and malformed records are skipped instead
        // of crashing the whole job with an NPE / parse exception.
        SingleOutputStreamOperator<etcData> etcDataStream = kafkaStream.flatMap(new FlatMapFunction<String, etcData>() {
            @Override
            public void flatMap(String value, Collector<etcData> collector) throws Exception {
                if (value == null || value.isEmpty()) {
                    return; // nothing to parse
                }
                try {
                    JSONObject jsonObject = JSON.parseObject(value);
                    etcData data = new etcData(
                            jsonObject.getInteger("XH"),
                            jsonObject.getString("CP"),
                            jsonObject.getString("CX"),
                            jsonObject.getTimestamp("RKSJ").toLocalDateTime(), // entry time
                            jsonObject.getString("SFZRKMC"),                   // entry toll-station name
                            jsonObject.getTimestamp("CKSJ").toLocalDateTime(), // exit time
                            jsonObject.getString("SFZCKMC"),                   // exit toll-station name
                            jsonObject.getString("BZ")
                    );
                    // Emit the parsed record downstream.
                    collector.collect(data);
                } catch (Exception e) {
                    // Malformed JSON or a missing timestamp field: log and drop this record.
                    System.err.println("Skipping malformed record: " + value + " (" + e + ")");
                }
            }
        });

        // Map the POJO stream to a Table and register it as a temporary view.
        Table tableA = tenv.fromDataStream(etcDataStream, "XH, CP, CX, RKSJ, SFZRKMC, CKSJ, SFZCKMC, BZ");
        tenv.createTemporaryView("ETC", tableA);

        // Continuous COUNT(*) over an unbounded stream: the result is updated as
        // records arrive (an updating table, not an append-only one).
        Table resultTable = tenv.sqlQuery("SELECT COUNT(*) FROM ETC");

        // FIX: toAppendStream(resultTable, etcData.class) fails at runtime here —
        // an aggregate result emits updates, which toAppendStream cannot consume
        // (TableException), and its schema is a single BIGINT, not etcData.
        // A retract stream of (isAddMessage, row) pairs handles updates correctly.
        tenv.toRetractStream(resultTable, Row.class).print();

        // Launch the job.
        env.execute("Kafka to Flink Example");
    }
}