package cn.doitedu.demo.stream_table;

import com.alibaba.fastjson.JSON;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

public class AppendOnly流转表 {

    /**
     * Demo: reads JSON strings from Kafka, parses them into {@link Person} POJOs,
     * and converts the resulting append-only stream into Flink SQL tables —
     * once with an auto-derived schema and once with an explicit schema that
     * declares an event-time attribute and watermark.
     */
    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(2000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");
        env.setParallelism(1);
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Kafka source: value-only string records from topic "a-1", starting at the latest offset.
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("doitedu:9092")
                .setTopics("a-1")
                .setStartingOffsets(OffsetsInitializer.latest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .setGroupId("gg-1")
                .setClientIdPrefix("cc-1")
                .build();


        // Expected record shape: "{"uid":1,"name":"zs","age":18,"action_time":10000}"
        DataStreamSource<String> stream = env.fromSource(source, WatermarkStrategy.noWatermarks(), "s");

        // Parse the JSON strings into a stream of Person beans.
        // flatMap (instead of map) lets us drop malformed records: with map, a single
        // bad Kafka message would throw out of JSON.parseObject and fail the whole job.
        SingleOutputStreamOperator<Person> personStream = stream.flatMap(new FlatMapFunction<String, Person>() {
            @Override
            public void flatMap(String s, Collector<Person> out) {
                try {
                    Person p = JSON.parseObject(s, Person.class);
                    if (p != null) {
                        out.collect(p);
                    }
                } catch (Exception ignored) {
                    // Skip unparseable records so one bad message cannot kill the job.
                    // NOTE(review): consider routing these to a side output / dead-letter
                    // topic instead of dropping them silently.
                }
            }
        });

        /* *
         *  Convert the stream above into tables.
         */

        // For a stream of a standard JavaBean type, fromDataStream derives the table
        // schema automatically via reflection: bean property names become column names,
        // and the Java property types are mapped to the corresponding SQL types.
        Table table = tenv.fromDataStream(personStream);
        tenv.createTemporaryView("tmp", table);


        // Alternatively, the table schema can be specified explicitly when converting.
        Table table2 = tenv.fromDataStream(personStream,
                Schema.newBuilder()
                        .column("uid", DataTypes.INT())
                        .column("name", DataTypes.STRING())
                        .column("age", DataTypes.INT())
                        .column("action_time", DataTypes.BIGINT())
                        .columnByExpression("name2", "upper(name)")
                        // Derive an event-time column from the epoch-millis action_time.
                        .columnByExpression("rt", "to_timestamp_ltz(action_time,3)")
                        // Equivalent to SQL: WATERMARK FOR rt AS rt (zero-delay watermark on rt).
                        .watermark("rt", "rt")
                        .build()
        );
        tenv.createTemporaryView("tmp2", table2);

        // print() pulls results continuously, which also triggers job execution.
        tenv.executeSql("select * from tmp2").print();

    }


    /**
     * JavaBean mirroring the Kafka JSON payload, e.g.
     * {@code {"uid":1,"name":"zs","age":18,"action_time":10000}}.
     *
     * <p>Lombok generates getters/setters, equals/hashCode/toString ({@code @Data})
     * plus the no-arg and all-args constructors. Field names deliberately match the
     * JSON keys (hence the snake_case {@code action_time}) so fastjson can bind them,
     * and they also become the column names when Flink reflectively derives the
     * table schema from this bean.
     */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class Person{
        // User id.
        private Integer uid;
        // User name.
        private String name;
        // User age.
        private Integer age;
        // Event timestamp in epoch milliseconds (converted via to_timestamp_ltz(action_time, 3)).
        private Long action_time;
    }


}
