package com.atguigu.gmall.realtime.dws.app;

/* NOTE(review): everything in this comment block is an obsolete earlier copy of the
   class implemented below — it is dead code kept only for reference; consider deleting.
import com.atguigu.gmall.realtime.common.base.BaseApp;
import com.atguigu.gmall.realtime.common.base.BaseSqlApp;
import com.atguigu.gmall.realtime.common.constant.Constant;
import com.atguigu.gmall.realtime.common.function.SplitWordFunction;
import com.atguigu.gmall.realtime.common.util.FlinkSqlUtil;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class DwsTrafficSourceKeywordPageViewWindow extends BaseSqlApp {
    public static void main(String[] args) {
        new DwsTrafficSourceKeywordPageViewWindow().start(10021, 4, Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW);
    }

    @Override
    public void handle(StreamTableEnvironment tableEnv, StreamExecutionEnvironment env) {
        // 读取DWD页面日志主题数据 topic_dwd_traffic_page
        tableEnv.executeSql(
                " create table page_log (" +
                        " common MAP<STRING,STRING> , " +
                        " page MAP<STRING,STRING> ,"  +
                        " ts BIGINT , " +
                        " et as to_timestamp_ltz(ts , 3) , " +
                        " watermark for et as et - interval '3' second "+
                        ") " + FlinkSqlUtil.getKafkaSourceDDL(Constant.TOPIC_DWD_TRAFFIC_PAGE , Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW)
        );
        tableEnv.sqlQuery("select * from page_log ").execute().print();

        // 筛选出搜索行为的数据
        Table keywordTable = tableEnv.sqlQuery(
                " select " +
                        " page['item'] keyword ," +
                        " et " +
                        " from page_log " +
                        " where " +
                        " ( page['last_page_id'] = 'search' or page['last_page_id'] = 'home' ) " +
                        " and " +
                        " page['item_type'] = 'keyword' " +
                        " and " +
                        " page['item'] is not null "
        );
        tableEnv.createTemporaryView("keyword_table", keywordTable);

        // 注册分词函数
        tableEnv.createTemporaryFunction("keyword_split", SplitWordFunction.class);

        // 分词
        Table wordTable = tableEnv.sqlQuery(
                " SELECT  " +
                        " word ,  " +
                        " et " +
                        " FROM keyword_table " +
                        " LEFT JOIN LATERAL TABLE(keyword_split(keyword)) ON TRUE");

        tableEnv.createTemporaryView("word_table" , wordTable );

        //开窗 、 分组聚合
        //事件时间滚动窗口  ， 窗口大小 10s
        //分组窗口 :  滚动  滑动  会话
        //TVF :     滚动  滑动  累积  未来支持会话

        Table windowTable = tableEnv.sqlQuery(
                " select " +
                        " DATE_FORMAT( window_start , 'yyyy-MM-dd HH:mm:ss') stt  , " +
                        " DATE_FORMAT( window_end ,'yyyy-MM-dd HH:mm:ss' )  edt ,  " +
                        " DATE_FORMAT( window_end ,'yyyy-MM-dd' ) cur_date , " +
                        " word , " +
                        " count(*) word_count " +
                        " from  TABLE( " +
                        "   TUMBLE(TABLE word_table , DESCRIPTOR(et) , INTERVAL '10' SECOND ) " +
                        " ) " +
                        " group by window_start ,window_end ,  word "
        );

        tableEnv.createTemporaryView("window_table" , windowTable);

        //tableEnv.sqlQuery( " select * from window_table").execute().print();

        //写出到Doris表
        tableEnv.executeSql(
                " create table " + Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW + " ( " +
                        " stt STRING , " +
                        " edt STRING , " +
                        " cur_date STRING , " +
                        " keyword STRING , " +
                        " keyword_count BIGINT " +
                        " ) " + FlinkSqlUtil.getDorisSinkDDL(Constant.DORIS_DATABASE , Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW)

        ) ;

        //写入
        windowTable.executeInsert( Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW ) ;


    }
}*/
import com.atguigu.gmall.realtime.common.base.BaseSqlApp;
import com.atguigu.gmall.realtime.common.constant.Constant;
import com.atguigu.gmall.realtime.common.function.SplitWordFunction;
import com.atguigu.gmall.realtime.common.util.FlinkSourceUtil;
import com.atguigu.gmall.realtime.common.util.FlinkSqlUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author WEIYUNHUI
 * @date 2024/8/7 15:40
 */
/**
 * DWS job: counts search keywords per 10-second event-time tumbling window and
 * writes the aggregates to Doris.
 *
 * <p>Pipeline: Kafka (DWD page log) -> filter search behaviour -> split the search
 * phrase into words with a UDTF -> tumbling-window count -> Doris sink.
 */
public class DwsTrafficSourceKeywordPageViewWindow extends BaseSqlApp {

    public static void main(String[] args) {
        // port 10021, parallelism 4; the target table name doubles as the job/consumer-group id
        new DwsTrafficSourceKeywordPageViewWindow().start(10021, 4, Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW);
    }

    @Override
    public void handle(StreamTableEnvironment tableEnv, StreamExecutionEnvironment env) {

        // 1. Source: DWD page-log topic (topic_dwd_traffic_page).
        //    `et` is an event-time attribute derived from the epoch-millis `ts`
        //    with a 3-second out-of-orderness watermark.
        tableEnv.executeSql(
                " create table page_log (" +
                        " common MAP<STRING,STRING> , " +
                        " page MAP<STRING,STRING> ,"  +
                        " ts BIGINT , " +
                        " et as to_timestamp_ltz(ts , 3) , " +
                        " watermark for et as et - interval '3' second "+
                        ") " + FlinkSqlUtil.getKafkaSourceDDL(Constant.TOPIC_DWD_TRAFFIC_PAGE , Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW)
        );

        //tableEnv.sqlQuery("select * from page_log ").execute().print();

        // 2. Keep only search behaviour: the user came from the 'search' or 'home'
        //    page, the item type is 'keyword', and a keyword is actually present.
        Table keywordTable = tableEnv.sqlQuery(
                " select " +
                        " page['item'] keyword ," +
                        " et " +
                        " from page_log " +
                        " where " +
                        " ( page['last_page_id'] = 'search' or page['last_page_id'] = 'home' ) " +
                        " and " +
                        " page['item_type'] = 'keyword' " +
                        " and " +
                        " page['item'] is not null "
        );
        tableEnv.createTemporaryView("keyword_table" , keywordTable);

        // 3. Register the word-splitting UDTF and explode each search phrase into
        //    individual words (one row per word, event time carried along).
        tableEnv.createTemporaryFunction("keyword_split" , SplitWordFunction.class);
        Table wordTable = tableEnv.sqlQuery(
                " SELECT  " +
                        " word ,  " +
                        " et " +
                        " FROM keyword_table " +
                        " LEFT JOIN LATERAL TABLE(keyword_split(keyword)) ON TRUE");

        tableEnv.createTemporaryView("word_table" , wordTable );

        // 4. Windowed aggregation: 10-second event-time tumbling window (windowing
        //    TVF; group windows would be the legacy alternative), counting per word.
        //    FIX: output columns are aliased `keyword` / `keyword_count` so the query
        //    schema matches the Doris sink DDL below. `executeInsert` maps columns by
        //    position, so the old `word` / `word_count` aliases happened to work, but
        //    the mismatch was fragile and misleading.
        Table windowTable = tableEnv.sqlQuery(
                " select " +
                        " DATE_FORMAT( window_start , 'yyyy-MM-dd HH:mm:ss') stt  , " +
                        " DATE_FORMAT( window_end ,'yyyy-MM-dd HH:mm:ss' )  edt ,  " +
                        " DATE_FORMAT( window_end ,'yyyy-MM-dd' ) cur_date , " +
                        " word keyword , " +
                        " count(*) keyword_count " +
                        " from  TABLE( " +
                        "   TUMBLE(TABLE word_table , DESCRIPTOR(et) , INTERVAL '10' SECOND ) " +
                        " ) " +
                        " group by window_start ,window_end ,  word "
        );

        // View kept for ad-hoc debugging (see the commented print below).
        tableEnv.createTemporaryView("window_table" , windowTable);

        //tableEnv.sqlQuery( " select * from window_table").execute().print();

        // 5. Sink: declare the Doris table and insert the windowed aggregates.
        tableEnv.executeSql(
                " create table " + Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW + " ( " +
                        " stt STRING , " +
                        " edt STRING , " +
                        " cur_date STRING , " +
                        " keyword STRING , " +
                        " keyword_count BIGINT " +
                        " ) " + FlinkSqlUtil.getDorisSinkDDL(Constant.DORIS_DATABASE , Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW)
        ) ;

        windowTable.executeInsert( Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW ) ;
    }
}