package david.java.flink_sql.streamApiIntegration;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Instant;

/**
 * @Description:
 * @See https://nightlies.apache.org/flink/flink-docs-release-1.14/docs/dev/table/data_stream_api/#examples-for-fromdatastream
 * @Author: ZhaoDawei
 * @Date: Create in 10:14 上午 2022/3/29
 */
public class S3_1_HandleInsertOnlyStream_1 {


    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        DataStream<User> dataStream = env.fromElements(
                new User("Alice", 4, Instant.ofEpochMilli(1000)),
                new User("Bob", 6, Instant.ofEpochMilli(1001)),
                new User("Alice", 10, Instant.ofEpochMilli(1002))
        );


        // === EXAMPLE 1 ===

        // derive all physical columns automatically
        Table table = tableEnv.fromDataStream(dataStream);
        table.printSchema();
        // (
        // `name` STRING,
        // `score` INT,
        // `event_time` TIMESTAMP_LTZ(9)


        // === EXAMPLE 2 ===

        // derive all physical columns automatically
        // but add computed columns (in this case for creating a proctime attribute column)
        tableEnv.fromDataStream(
                dataStream,
                Schema.newBuilder()
                        .columnByExpression("pro_time", "PROCTIME()")
                        .build())
                .printSchema();
        // (
        //   `name` STRING,
        //   `score` INT,
        //   `event_time` TIMESTAMP_LTZ(9),
        //   `pro_time` TIMESTAMP_LTZ(3) NOT NULL *PROCTIME* AS PROCTIME()
        // )


        // === EXAMPLE 3 ===

        // derive all physical columns automatically
        // but add computed columns (in this case for creating a rowtime attribute column)
        // and a custom watermark strategy
        tableEnv.fromDataStream(
                dataStream,
                Schema.newBuilder()
                        .columnByExpression("rowtime", "cast(event_time as timestamp_ltz(3))")
                        .watermark("rowtime", "rowtime - interval '10' second")
                        .build())
                .printSchema();
        //(
        //   `name` STRING,
        //   `score` INT,
        //   `event_time` TIMESTAMP_LTZ(9),
        //   `rowtime` TIMESTAMP_LTZ(3) *ROWTIME* AS cast(event_time as timestamp_ltz(3)),
        //   WATERMARK FOR `rowtime`: TIMESTAMP_LTZ(3) AS rowtime - interval '10' second
        // )


        // === EXAMPLE 4 ===

        // derive all physical columns automatically
        // but access the stream record's timestamp for creating a rowtime attribute column
        // also rely on the watermarks generated in the DataStream API

        // we assume that a watermark strategy has been defined for `dataStream` before
        // (not part of this example)
        tableEnv.fromDataStream(
                dataStream,
                Schema.newBuilder()
                        .columnByMetadata("rowtime", "TIMESTAMP_LTZ(3)")
                        .watermark("rowtime", "SOURCE_WATERMARK()")
                        .build())
                .printSchema();
        // (
        //   `name` STRING,
        //   `score` INT,
        //   `event_time` TIMESTAMP_LTZ(9),
        //   `rowtime` TIMESTAMP_LTZ(3) *ROWTIME* AS cast(event_time as timestamp_ltz(3)),
        //   WATERMARK FOR `rowtime`: TIMESTAMP_LTZ(3) AS source_watermark()
        // )


        //// === EXAMPLE 5 ===
        //
        // // define physical columns manually
        // // in this example,
        // //   - we can reduce the default precision of timestamps from 9 to 3
        // //   - we also project the columns and put `event_time` to the beginning
        tableEnv.fromDataStream(
                dataStream,
                Schema.newBuilder()
                        .column("event_time", "TIMESTAMP_LTZ(3)")
                        .column("name", "STRING")
                        .column("score", "INT")
                        .watermark("event_time", "SOURCE_WATERMARK()")
                        .build())
                .printSchema();
        // // (
        // //  `event_time` TIMESTAMP_LTZ(3) *ROWTIME*,
        // //  `name` VARCHAR(200),
        // //  `score` INT
        // // )


    }


    public static class User {
        public String name;

        public Integer score;

        public Instant event_time;

        // default constructor for DataStream API
        public User() {
        }

        // fully assigning constructor for Table API
        public User(String name, Integer score, Instant event_time) {
            this.name = name;
            this.score = score;
            this.event_time = event_time;
        }
    }
}
