package streaming.api.source.tEnv;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import base.model.ZBTopic1;
import utils.PropertiesReader;

import java.util.Properties;

/**
 * Flink streaming job that consumes JSON messages from a Kafka topic, maps them
 * to {@link ZBTopic1} POJOs, registers the stream as a temporary table view, and
 * prints both the table schema and a continuous {@code SELECT *} over it.
 *
 * <p>Expected message shape (per the map function below): a JSON object with
 * string fields {@code t_id}, {@code t_key}, {@code t_val}.
 */
public class TEnvSource01 {

    // Kafka connection settings are externalized; keys must exist in the properties file.
    private static final String KAFKA_SERVERS = PropertiesReader.get("default.kafka.servers");
    private static final String TOPIC_FROM = PropertiesReader.get("default.kafka.topic.json.C");

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Checkpoint every 10s with exactly-once semantics so the Kafka offsets are
        // committed consistently with the job state.
        env.enableCheckpointing(10000, CheckpointingMode.EXACTLY_ONCE);
        EnvironmentSettings settings =
                EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, settings);

        Properties props = new Properties();
        props.setProperty("bootstrap.servers", KAFKA_SERVERS);
        props.setProperty("group.id", "flink-consumer-TEnvSource01");

        // Parameterized types (was raw DataStream/FlinkKafkaConsumer): keeps the
        // pipeline type-safe and avoids unchecked warnings.
        DataStream<String> inputStream =
                env.addSource(new FlinkKafkaConsumer<>(TOPIC_FROM, new SimpleStringSchema(), props));
        DataStream<ZBTopic1> dataStream = inputStream.map(new MapFunction<String, ZBTopic1>() {
            @Override
            public ZBTopic1 map(String s) throws Exception {
                // Parse each Kafka record as a JSON object and lift the three
                // expected string fields into the POJO.
                JSONObject obj = JSON.parseObject(s);
                return new ZBTopic1(
                        obj.getString("t_id"), obj.getString("t_key"), obj.getString("t_val"));
            }
        });
        // NOTE(review): this print sink belongs to the DataStream job, which only runs
        // when env.execute() is called; since result.print() below blocks on the
        // streaming SELECT, this sink never fires as written — confirm intent.
        dataStream.print("data:");

        // Expose the POJO stream as a table, renaming POJO fields to the t_* names.
        String tableFields = "id as t_id,key as t_key,val as t_val";
        tEnv.createTemporaryView("kafkaSource", dataStream, tableFields);

        Table kafka = tEnv.from("kafkaSource");
        kafka.printSchema();

        // executeSql submits the table job immediately; print() blocks and streams
        // rows to stdout for as long as the job runs.
        TableResult result = tEnv.executeSql("select * from kafkaSource");
        result.print();
    }
}
