package com.atguigu;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;

import java.lang.reflect.Field;
import java.time.ZoneOffset;

/**
 * Demo: consume a Kafka topic through Flink SQL with an event-time attribute
 * derived from an epoch-seconds {@code bigint} column, attach a 2-second
 * bounded-out-of-orderness watermark, and print the current watermark observed
 * inside a downstream {@link ProcessFunction}.
 *
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/2/17 16:40
 */
public class ETDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Expose the Flink web UI on a fixed port for local debugging.
        setWebUi(env, 1998);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        // Use UTC so from_unixtime/to_timestamp conversions are not shifted by the local zone.
        tEnv.getConfig().setLocalTimeZone(ZoneOffset.ofHours(0));
        // The event-time attribute must be of TIMESTAMP type, so a computed column `t`
        // is derived from the epoch-seconds bigint column `ts`.
        tEnv.executeSql("create table sensor(" +
                            "id string," +
                            "ts bigint," +
                            "vc int, " +
                            "t as to_timestamp(from_unixtime(ts, 'yyyy-MM-dd HH:mm:ss')), " +
                            // Bounded out-of-orderness: tolerate events up to 2s late.
                            "watermark for t as t - interval '2' second" +
                            ")with(" +
                            "   'connector' = 'kafka'," +
                            "   'topic' = 'test'," +
                            // All three brokers listen on the standard Kafka port 9092
                            // (was 'hadoop162:9029' — typo'd port made that broker unreachable).
                            "   'properties.bootstrap.servers' = 'hadoop162:9092,hadoop163:9092,hadoop164:9092'," +
                            "   'properties.group.id' = 'test'," +
                            "   'scan.startup.mode' = 'latest-offset'," +
                            "   'format' = 'csv'" +
                            ")");

        final Table table = tEnv.sqlQuery("select * from sensor");
        // Append-only query, so toAppendStream is safe here.
        final DataStream<Row> ds = tEnv.toAppendStream(table, Row.class);
        ds.process(new ProcessFunction<Row, String>() {
            @Override
            public void processElement(Row value, Context ctx, Collector<String> out) throws Exception {
                // Print the watermark seen by this subtask at the time each element arrives.
                System.out.println(ctx.timerService().currentWatermark());
                out.collect(value.toString());
            }
        }).setParallelism(2).print().setParallelism(1);

        env.execute();
    }

    /**
     * Forces the REST/web-UI port via reflection on the environment's private
     * {@code configuration} field. Best effort: failures are logged and ignored
     * so the demo still runs without the UI.
     *
     * @param env  the execution environment whose configuration is patched
     * @param port the REST port to bind the Flink web UI to
     */
    public static void setWebUi(StreamExecutionEnvironment env, int port) {
        try {
            // NOTE(review): relies on an internal field name; may break across Flink versions.
            final Field field = StreamExecutionEnvironment.class.getDeclaredField("configuration");
            field.setAccessible(true);
            final Configuration config = (Configuration) field.get(env);
            config.setInteger("rest.port", port);
        } catch (Exception e) {
            // Deliberately non-fatal: the web UI is a convenience, not a requirement.
            e.printStackTrace();
        }
    }
}
