package com.atguigu.flinksqltest.day11;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.Tumble;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import java.sql.Timestamp;

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.lit;
/**
 * ClassName: Test1
 * Package: com.atguigu.flinksqltest.day11
 * Description:
 *
 * @Author ChenJun
 * @Create 2023/4/19 10:04
 * @Version 1.0
 */
public class Test1 {
    /**
     * Reads "id,vc,ts" lines from a socket, computes the per-id maximum vc over
     * 10-second processing-time tumbling windows, and writes the results to the
     * Kafka topic "first" as "id,maxVc,windowEndMillis" strings.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source: raw comma-separated lines "id,vc,ts" from a socket.
        DataStream<String> dataStream = env.socketTextStream("hadoop102", 9999);
        DataStream<Tuple3<String, Double, Long>> tupleStream = dataStream.map(new MapFunction<String, Tuple3<String, Double, Long>>() {
            @Override
            public Tuple3<String, Double, Long> map(String value) throws Exception {
                String[] fields = value.split(",");
                String id = fields[0];
                double vc = Double.parseDouble(fields[1]);
                long ts = Long.parseLong(fields[2]);
                return new Tuple3<>(id, vc, ts);
            }
        });

        // Register the stream as a table, appending a processing-time attribute "pt".
        Table table = tableEnv.fromDataStream(tupleStream, $("id"), $("vc"), $("ts"), $("pt").proctime());

        // 10-second tumbling window on processing time, grouped by id; emit the
        // max vc plus the window end timestamp. Uses the expression-based window
        // definition (lit(...)) instead of the deprecated string form
        // Tumble.over("10.seconds"), consistent with the $(...) calls above.
        Table resultTable = table.window(Tumble.over(lit(10).seconds()).on($("pt")).as("w"))
                .groupBy($("id"), $("w"))
                .select($("id"), $("vc").max().as("max_vc"), $("w").end().as("window_end"));

        // Convert the appended window results back to CSV strings
        // "id,maxVc,windowEndMillis" for the Kafka sink.
        DataStream<String> resultStream = tableEnv.toAppendStream(resultTable, Row.class)
                .map(new MapFunction<Row, String>() {
                    @Override
                    public String map(Row value) throws Exception {
                        String id = value.getField(0).toString();
                        double maxVc = Double.parseDouble(value.getField(1).toString());
                        long windowEnd = ((Timestamp) value.getField(2)).getTime();
                        return id + "," + maxVc + "," + windowEnd;
                    }
                });
        resultStream.addSink(new FlinkKafkaProducer<>("hadoop102:9092", "first", new SimpleStringSchema()));

        // BUG FIX: without this call the job graph is built but never submitted,
        // so the program would exit immediately and process nothing.
        env.execute("Test1");
    }
}
