package page;

import com.zhang.gmall.utils.KafkaUtil;
import com.zhang.gmall.utils.MyEnv;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @title: PageView — counts page views per 10-second tumbling event-time window from dwd_page_log
 * @author: zhang
 * @date: 2022/4/1 18:18
 */
public class PageView {

    /**
     * Flink job entry point: consumes page-log events from the Kafka topic
     * {@code dwd_page_log}, counts page views ({@code pv}) per 10-second
     * tumbling event-time window, and prints the result as a changelog stream.
     */
    public static void main(String[] args) throws Exception {
        //TODO 1. Build the streaming and table environments.
        // NOTE(review): the meaning of getStreamingEnv(4, false) — presumably
        // parallelism and a checkpointing toggle — is defined in MyEnv; confirm there.
        StreamExecutionEnvironment streamEnv = MyEnv.getStreamingEnv(4, false);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(streamEnv);

        //TODO 2. Declare the Kafka-backed source table. The event-time column
        // `rt` is derived from the epoch-millis field `ts`; the watermark
        // allows 2 seconds of out-of-orderness.
        String pageLogDdl = "create table page_log ("
                + "       `page` map<string,string>,"
                + "       `ts` bigint,"
                + "       `rt` as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)),"
                + "       WATERMARK FOR rt AS rt - INTERVAL '2' SECOND "
                + ")"
                + KafkaUtil.getKafkaDDL("dwd_page_log", "PageView");
        tableEnv.executeSql(pageLogDdl);

        // 10-second tumbling-window aggregation: one pv count per window,
        // tagged with formatted window bounds and an emission timestamp.
        String pvQuery = "select "
                + "       'pv' source,"
                + "       DATE_FORMAT(window_start,'yyyy-MM-dd HH:mm:ss') stt,"
                + "       DATE_FORMAT(window_end,'yyyy-MM-dd HH:mm:ss') edt,"
                + "       count(*) pv,"
                + " UNIX_TIMESTAMP()*1000 ts "
                + " FROM TABLE ("
                + " TUMBLE( TABLE page_log,"
                + " DESCRIPTOR(rt) ,"
                + " INTERVAL '10' SECOND)) "
                + " group by window_start,window_end";
        Table pvTable = tableEnv.sqlQuery(pvQuery);

        // Print the windowed counts; changelog form keeps insert/update flags.
        tableEnv.toChangelogStream(pvTable).print();

        streamEnv.execute();
    }
}
