package david.java.flink_sql.streamApiIntegration;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * @Description: toChangelogStream — details of converting a retraction (updating) table back
 * into a DataStream.
 * <p>
 * The following example shows how updating tables can be converted.
 * Every result row represents an entry in a changelog with a change flag that can be queried by calling row.getKind() on it.
 * In the example, the second score for Alice creates an update-before (-U) and update-after (+U) change.
 * @see <a href="https://nightlies.apache.org/flink/flink-docs-release-1.14/docs/dev/table/data_stream_api/#converting-between-datastream-and-table">Flink docs: converting between DataStream and Table</a>
 * @Author: ZhaoDawei
 * @Date: Created at 10:19 AM 2022/3/22
 */
public class S1_StreamTableConversion_2 {

    /**
     * Demonstrates converting an updating (retraction) Table into a changelog
     * DataStream, and inspecting each {@code Row}'s change flag via {@code getKind()}.
     */
    public static void main(String[] args) throws Exception {

        // Streaming environment plus its Table API bridge.
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        // streamEnv.setRuntimeMode(RuntimeExecutionMode.STREAMING);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(streamEnv);

        // Bounded demo source: Alice appears twice so the aggregation emits an update.
        DataStreamSource<Row> source = streamEnv.fromElements(
                Row.of("Alice", 12),
                Row.of("Bob", 10),
                Row.of("Alice", 100)
        );

        Table input = tEnv.fromDataStream(source).as("name", "score");
        tEnv.createTemporaryView("InputTable", input);

        // sqlQuery only builds the query plan; it produces no output on its own.
        Table aggregated = tEnv.sqlQuery("select name, sum(score) from InputTable Group by name");
        // Prints:
        // (
        //   `name` STRING,
        //   `EXPR$1` INT
        // )
        aggregated.printSchema();

        // execute() materializes the table so the result can be inspected.
        TableResult tableResult = aggregated.execute();
        // Equivalent: tEnv.executeSql("select name, sum(score) from InputTable Group by name");
        // Expected output (op column carries the change flag):
        // +----+--------------------------------+-------------+
        // | op |                           name |      EXPR$1 |
        // +----+--------------------------------+-------------+
        // | +I |                            Bob |          10 |
        // | +I |                          Alice |          12 |
        // | -U |                          Alice |          12 |
        // | +U |                          Alice |         112 |
        // +----+--------------------------------+-------------+
        tableResult.print();

        /*
         * toDataStream(aggregated) would fail here: GROUP BY produces a GroupAggregate
         * node that emits retractions (update changes), and an insert-only DataStream
         * sink cannot consume them. The error would be:
         *   Table sink 'default_catalog.default_database.Unregistered_DataStream_Sink_1'
         *   doesn't support consuming update changes which is produced by node
         *   GroupAggregate(groupBy=[name], select=[name, SUM(score) AS EXPR$1])
         */
        // DataStream<Row> insertOnly = tEnv.toDataStream(aggregated);

        // A changelog stream carries the change flag in each Row's RowKind.
        DataStream<Row> changelog = tEnv.toChangelogStream(aggregated);

        // The explicit MapFunction cast gives Flink the type information it needs
        // for a generic lambda. Each row is dumped field by field; getKind() yields
        // INSERT, UPDATE_BEFORE or UPDATE_AFTER for this query.
        changelog.map((MapFunction<Row, Row>) row -> {
            System.out.println("getKind ===> " + row.getKind());
            // Number of fields in the row (2 here: name, EXPR$1).
            System.out.println("getArity ===> " + row.getArity());
            System.out.println("getField(0) ===> " + row.getField(0));
            System.out.println("getField(1) ===> " + row.getField(1));
            System.out.println("getFieldNames ===> " + row.getFieldNames(true));
            return row;
        });

        // Expected: +I[Bob, 10], +I[Alice, 12], -U[Alice, 12], +U[Alice, 112]
        changelog.print();
        streamEnv.execute();
    }
}
