package streaming.api.source.tEnv;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import utils.KafkaHelper;
import utils.PropertiesReader;

import java.util.Properties;

public class TEnvSource02 {

    // Kafka connection settings, loaded from the project's properties file.
    private static final String KAFKA_SERVERS = PropertiesReader.get("default.kafka.servers");
    private static final String TOPIC_FROM = PropertiesReader.get("default.kafka.topic.json.C");

    /**
     * Reads JSON records from a Kafka topic, converts each record into a
     * {@link Row} using the configured field names and types, registers the
     * stream as the temporary table {@code KafkaSource}, and prints the result
     * of a {@code SELECT *} query over it.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Checkpoint every 10 seconds with exactly-once guarantees.
        env.enableCheckpointing(10000, CheckpointingMode.EXACTLY_ONCE);
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, settings);

        Properties props = new Properties();
        props.setProperty("bootstrap.servers", KAFKA_SERVERS);
        props.setProperty("group.id", "flink-consumer-TEnvSource02");

        String jsonFields = "['t_id','t_key','t_val']";
        String jsonFieldTypes = "['String','String','String']";
        String[] kafkaFields = KafkaHelper.getKafkaFields(jsonFields); // field name list
        TypeInformation<?>[] kafkaTypes = KafkaHelper.getKafkaFieldsTypeInfo(jsonFieldTypes); // field type list

        // Parameterized the previously raw consumer/stream types with String so
        // the map function receives a String directly (no unchecked cast needed).
        DataStream<String> inputStream =
                env.addSource(new FlinkKafkaConsumer<>(TOPIC_FROM, new SimpleStringSchema(), props));
        // Parse each JSON record and copy the configured fields into a Row;
        // .returns(...) supplies the Row type info that erasure would otherwise lose.
        DataStream<Row> dataStream = inputStream.map((MapFunction<String, Row>) s -> {
            JSONObject obj = JSON.parseObject(s);
            int len = kafkaFields.length;
            Row row = new Row(len);
            for (int i = 0; i < len; i++) {
                row.setField(i, obj.getString(kafkaFields[i]));
            }
            return row;
        }).returns(new RowTypeInfo(kafkaTypes, kafkaFields));
        // NOTE(review): this print sink never runs — it would require env.execute(),
        // which is never reached because result.print() below blocks forever on the
        // unbounded streaming query. Remove it, or execute the env in a separate job.
        inputStream.print("data:");

        tEnv.createTemporaryView("KafkaSource", dataStream);

        Table kafka = tEnv.from("KafkaSource");
        kafka.printSchema();

        // executeSql submits the streaming job; print() blocks and writes each
        // result row to stdout as it arrives.
        TableResult result = tEnv.executeSql("select * from KafkaSource");
        result.print();

    }
}
