package david.java.flink_sql.streamApiIntegration;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.data.StringData;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;
import org.apache.flink.util.Collector;

import java.time.Instant;
import java.util.Date;

import static org.apache.flink.table.api.Expressions.*;


/**
 * @Description: To use {@code $("name")}, {@code row()} etc., Flink requires the static import
 *               {@code import static org.apache.flink.table.api.Expressions.*;}
 * @See: https://nightlies.apache.org/flink/flink-docs-release-1.14/docs/dev/table/data_stream_api/#examples-for-tochangelogstream
 * @Author: ZhaoDawei
 * @Date: Created at 10:13 AM, 2022/4/1
 */
public class S4_2_ToChangeLogStream {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source table with an event-time attribute and a watermark, backed by the
        // unbounded 'datagen' connector (random rows).
        tableEnv.executeSql(
                "CREATE TABLE GeneratedTable "
                        + "("
                        + "  name STRING,"
                        + "  score INT,"
                        + "  event_time TIMESTAMP_LTZ(3),"
                        + "  WATERMARK FOR event_time AS event_time - INTERVAL '10' SECOND"
                        + ")"
                        + "WITH ('connector'='datagen')");

        Table table = tableEnv.from("GeneratedTable");

        example1SimplestConversion(tableEnv);
        example2EventTimeInRow(tableEnv, table);
        example3RowtimeAsMetadata(tableEnv, table);
        example4InternalDataStructures(tableEnv, table);

        // A single execute() fires the pipelines built by examples 2-4. Example 1
        // already ran its own job via executeAndCollect().
        env.execute();
    }

    /**
     * EXAMPLE 1: convert to DataStream in the simplest and most general way
     * possible (no event-time). The aggregation produces an updating table, so the
     * changelog stream carries insert and retract/update rows, e.g.:
     * <pre>
     *     +I[Bob, 12]
     *     +I[Alice, 12]
     *     -U[Alice, 12]
     *     +U[Alice, 14]
     * </pre>
     */
    private static void example1SimplestConversion(StreamTableEnvironment tableEnv) throws Exception {
        Table simpleTable = tableEnv
                .fromValues(row("Alice", 12), row("Alice", 2), row("Bob", 12))
                .as("name", "score")
                .groupBy($("name"))
                .select($("name"), $("score").sum());

        tableEnv.toChangelogStream(simpleTable)
                .executeAndCollect()
                .forEachRemaining(System.out::println);
    }

    /**
     * EXAMPLE 2: convert with event-time. Since {@code event_time} is the single
     * time attribute in the schema, it is set as the stream record's timestamp by
     * default; at the same time it remains part of the Row.
     */
    private static void example2EventTimeInRow(StreamTableEnvironment tableEnv, Table table) {
        DataStream<Row> dataStream = tableEnv.toChangelogStream(table);

        dataStream.process(
                new ProcessFunction<Row, Void>() {
                    @Override
                    public void processElement(Row row, Context context, Collector<Void> collector) throws Exception {
                        // Prints [name, score, event_time]
                        System.out.println(row.getFieldNames(true));

                        // Record timestamp equals the event_time column.
                        // NOTE: only checked when the JVM runs with -ea.
                        assert context.timestamp() == row.<Instant>getFieldAs("event_time").toEpochMilli();
                    }
                }
        );
    }

    /**
     * EXAMPLE 3: write out the time attribute as a metadata column, which means it
     * is no longer part of the physical schema; the record timestamp is defined by
     * the metadata instead.
     */
    private static void example3RowtimeAsMetadata(StreamTableEnvironment tableEnv, Table table) {
        DataStream<Row> dataStream = tableEnv.toChangelogStream(
                table,
                Schema.newBuilder()
                        .column("name", "STRING")
                        .column("score", "INT")
                        .columnByMetadata("rowtime", "TIMESTAMP_LTZ(3)")
                        .build());

        dataStream.process(
                new ProcessFunction<Row, Void>() {
                    @Override
                    public void processElement(Row row, Context ctx, Collector<Void> out) throws Exception {
                        // Prints [name, score] — rowtime is metadata only, not in the Row.
                        System.out.println(row.getFieldNames(true));
                        System.out.println(ctx.timestamp());
                    }
                }
        );
    }

    /**
     * EXAMPLE 4 (advanced): bridge columns to internal or alternative data
     * structures for efficiency. Mentioned for completeness — internal structures
     * add complexity and extra type handling. However, converting a TIMESTAMP_LTZ
     * column to {@code Long} or a STRING to {@code byte[]} can be convenient, and
     * structured types can be represented as {@code Row} if needed.
     */
    private static void example4InternalDataStructures(StreamTableEnvironment tableEnv, Table table) {
        DataStream<Row> dataStream = tableEnv.toChangelogStream(
                table,
                Schema.newBuilder()
                        .column("name", DataTypes.STRING().bridgedTo(StringData.class))
                        .column("score", DataTypes.INT())
                        // Bridged to Long → epoch millis such as 1648866837595; without
                        // bridging the value would be shown as 2022-04-02T02:33:57.594Z.
                        .column("event_time", DataTypes.TIMESTAMP_LTZ(3).bridgedTo(Long.class))
                        .build()
        );

        dataStream.process(
                new ProcessFunction<Row, Void>() {
                    @Override
                    public void processElement(Row row, Context ctx, Collector<Void> out) throws Exception {
                        System.out.println(row.getFieldNames(true)); // [name, score, event_time]
                        System.out.println(row.getField(0)); // name as internal StringData
                        System.out.println(row.getField(1)); // score as Integer
                        System.out.println(row.getField(2)); // event_time as epoch-millis Long
                        System.out.println(ctx.timestamp());
                    }
                }
        );
    }

}
