package com.atguigu.gmall.realtime.tutorial.sql;

import com.atguigu.gmall.realtime.tutorial.beans.Sensor;
import com.atguigu.gmall.realtime.tutorial.beans.Sensor1;
import com.atguigu.gmall.realtime.tutorial.beans.Sensor2;
import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Demo/tutorial Flink job: reads sensor readings from a Kafka topic through a
 * Flink SQL DDL-defined table, prints the raw rows, and computes the per-sensor
 * temperature sum over 10-second tumbling event-time windows.
 *
 * <p>Source schema: {@code id STRING, create_time STRING, temperature DOUBLE},
 * with event time {@code rowtime} derived via {@code TO_TIMESTAMP(create_time)}
 * and a zero-delay watermark.
 */
public class SQLTEST_Kafka1 {
    public static void main(String[] args) throws Exception {
        // TODO 0: basic environment setup.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps console output ordered for this tutorial job.
        env.setParallelism(1);

        // NOTE(review): checkpointing is not configured, so all state is lost on
        // failure. Enable it (e.g. env.enableCheckpointing(...)) before any
        // non-tutorial use.

        // TODO 1: create the Table API environment in streaming mode.
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // TODO 2: define the Kafka-backed source table with FlinkSQL. The WITH
        // clause (connector, topic "sensor", group "sensor_test", broker
        // localhost:9092) is produced by the project helper MyKafkaUtil.
        String sql =
                "CREATE TABLE sensor(id STRING,create_time STRING,temperature DOUBLE," +
                        " rowtime AS TO_TIMESTAMP(create_time),WATERMARK FOR rowtime AS rowtime )    WITH (   "
                        + MyKafkaUtil.getKafkaDDLTest("sensor", "sensor_test", "localhost:9092")
                        + ")";
        // Echo the generated DDL for debugging before executing it.
        System.out.println(sql);
        tableEnv.executeSql(sql);

        // Pass-through query of the raw rows, printed for debugging.
        Table base = tableEnv.sqlQuery("select id,create_time,temperature,rowtime from sensor ");
        DataStream<Sensor2> baseSensor = tableEnv.toAppendStream(base, Sensor2.class);
        baseSensor.print("base >>>>>");

        // 10-second tumbling event-time window: sum(temperature) per sensor id.
        // Window start/end are formatted as "yyyy-MM-dd HH:mm:ss" strings; ts is
        // the wall-clock emission time in milliseconds (UNIX_TIMESTAMP() * 1000).
        Table table = tableEnv.sqlQuery("select " +
                "DATE_FORMAT(TUMBLE_START(rowtime, INTERVAL '10' SECOND ),'yyyy-MM-dd HH:mm:ss') stt, " +
                "DATE_FORMAT(TUMBLE_END(rowtime, INTERVAL '10' SECOND ),'yyyy-MM-dd HH:mm:ss') edt , " +
                "id ,sum(temperature) temperature ,UNIX_TIMESTAMP()*1000 ts from sensor " +
                "group by  TUMBLE(rowtime, INTERVAL '10' SECOND ),id");  // results emitted when the 10s window closes

        // Windowed aggregation over append-only input still yields an append
        // stream, so toAppendStream is valid here.
        DataStream<Sensor1> sensorTable = tableEnv.toAppendStream(table, Sensor1.class);
        sensorTable.print("calculate >>>>>");

        // Submit the job; blocks until the (unbounded) streaming job terminates.
        env.execute();
    }
}
