package org.example.flink;

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.table.api.*;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Json;
import org.apache.flink.table.descriptors.Kafka;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.table.functions.ScalarFunction;
import org.apache.flink.table.runtime.operators.window.Window;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;

/**
 * Flink streaming job: reads JSON records from a Kafka topic through the Table API,
 * enriches each row with a processing timestamp and a scalar-function transform of
 * the {@code name} column, then keys the stream, windows it per minute with a custom
 * count/timeout trigger, and prints the window contents.
 *
 * <p>NOTE(review): topic, broker, and group id are hard-coded; externalize them to
 * {@code args} or configuration before any non-local use.
 */
public class KafkaRead {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        // Exactly-once checkpoints every 5s so Kafka offsets commit consistently
        // with operator state.
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);

        // Logical schema of the incoming JSON payload.
        final Schema schema = new Schema()
                .field("name", DataTypes.STRING())
                .field("type", DataTypes.STRING());

        Kafka kafka = new Kafka()
                .version("universal")
                .topic("my_tpc")
                .property("zookeeper.connect", "ha:2181")
                .property("bootstrap.servers", "ha:9092")
                .property("group.id", "testGroup")
                .startFromGroupOffsets();

        // Register the Kafka source as temporary table "t"; tolerate JSON records
        // that are missing declared fields instead of failing the job.
        tEnv.connect(kafka)
                .withFormat(new Json().failOnMissingField(false))
                .withSchema(schema)
                .createTemporaryTable("t");

        // Class.newInstance() is deprecated since Java 9 (it propagates checked
        // constructor exceptions unwrapped); getDeclaredConstructor().newInstance()
        // is the supported replacement.
        tEnv.registerFunction("str",
                (ScalarFunction) Class.forName("org.example.flink.TestFunca")
                        .getDeclaredConstructor()
                        .newInstance());

        // Append a processing-timestamp column "t" and replace "name" with str(name).
        Table table = tEnv.sqlQuery("select * from t")
                .addColumns("currentTimestamp() as t")
                .addOrReplaceColumns("str(name) as name");

        // Fire the 1-minute window early once the trigger's element count (5000)
        // is reached, using processing time for the timeout.
        CountTriggerWithTimeout<Row> trigger =
                new CountTriggerWithTimeout<>(5000, TimeCharacteristic.ProcessingTime);
        tEnv.toAppendStream(table, Row.class)
                // Anonymous class (not a lambda) so Flink can extract the key type.
                .keyBy(new KeySelector<Row, String>() {
                    @Override
                    public String getKey(Row value) throws Exception {
                        // Key on the first column ("name").
                        return (String) value.getField(0);
                    }
                })
                .timeWindow(Time.minutes(1))
                .trigger(trigger)
                .process(new ProcessWindowFunction<Row, Void, String, TimeWindow>() {
                    @Override
                    public void process(String key, Context context, Iterable<Row> elements,
                                        Collector<Void> out) throws Exception {
                        // Debug sink: dump the buffered window contents.
                        System.out.println(elements);
                    }
                })
                .print();

        tEnv.execute("KafkaRead");
    }
}
