package cn.doitedu.sql;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;

/**
 * Demonstrates converting a changelog {@link DataStream} of {@link Row}s into a
 * changelog {@link Table} with a primary key and upsert semantics, then running a
 * continuous aggregation over it.
 *
 * <p>The upsert changelog contains an INSERT for keys "1" and "2", followed by an
 * UPDATE_AFTER for key "1", so the aggregation reflects the updated value (100)
 * rather than the original (12) for that key.
 */
public class Demo18_ChangelogStreamToChangelogTable {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");

        // Single parallelism keeps the changelog ordering deterministic for the demo.
        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // 2. Build a changelog source: two inserts, then an upsert-style update for key "1".
        DataStream<Row> changelogStream =
                env.fromElements(
                        Row.ofKind(RowKind.INSERT, "1", 12),
                        Row.ofKind(RowKind.INSERT, "2", 5),
                        Row.ofKind(RowKind.UPDATE_AFTER, "1", 100));

        // 3. Convert to a Table. Upsert mode requires a primary key (f0) so the
        //    planner can interpret UPDATE_AFTER rows without UPDATE_BEFORE events.
        Table table =
                tenv.fromChangelogStream(
                        changelogStream,
                        Schema.newBuilder().primaryKey("f0").build(),
                        ChangelogMode.upsert());

        // 4. Register as a temporary view so it is addressable from SQL.
        tenv.createTemporaryView("InputTable", table);

        // 5. Continuous aggregation; executeSql(...).print() submits the job and
        //    blocks until it finishes, so no separate env.execute() is needed.
        //    (Calling env.execute() afterwards would fail with "No operators
        //    defined in streaming topology" since the DataStream was consumed
        //    by the Table pipeline.)
        tenv.executeSql("SELECT f0 AS id, SUM(f1) AS score FROM InputTable group by f0")
                .print();
    }
}
