package com.group2.edu.realtime.dws.app;

import com.group2.edu.realtime.common.base.BaseSQLApp;
import com.group2.edu.realtime.common.util.SQLUtil;
import com.group2.edu.realtime.dws.function.MyKeywordAnalyzeFunction;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS-layer traffic-domain summary: page views aggregated per source keyword
 * per tumbling window, written to Doris.
 *
 * Required running processes: f1 (log mock/collector), DwdBaseLog, Doris.
 *
 * @author 高耀
 * @since 2024/12/16
 */
public class DwsTrafficSourceKeywordPageViewWindow extends BaseSQLApp {

    public static void main(String[] args) {
        // Port 10021, parallelism 4, job/consumer-group id below.
        new DwsTrafficSourceKeywordPageViewWindow().start(
                10021,
                4,
                "DwsTrafficSourceKeywordPageViewWindow"
        );
    }

    /**
     * Pipeline: Kafka topic {@code dwd_traffic_page} -&gt; filter keyword-search
     * page views -&gt; split each search phrase into keywords (UDTF) -&gt; count per
     * 10-second tumbling event-time window -&gt; Doris sink.
     *
     * @param env                    underlying stream environment (not used directly; job is pure SQL)
     * @param streamTableEnvironment table environment every SQL statement runs against
     */
    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment streamTableEnvironment) {
        // 1. Register the keyword-splitting table function (UDTF).
        streamTableEnvironment.createFunction("kw_analyze", MyKeywordAnalyzeFunction.class);

        // 2. Kafka source over the DWD page-view topic. The watermark declared
        //    here is required by the windowed aggregation further down.
        createPageViewSource(streamTableEnvironment);

        // 3. Keep only keyword-search page views and the columns we need.
        registerFilteredView(streamTableEnvironment);

        // 4. Explode each full search phrase into individual keywords.
        registerKeywordView(streamTableEnvironment);

        // 5. Count keywords per 10-second tumbling event-time window.
        registerWindowAggregateView(streamTableEnvironment);

        // 6 + 7. Declare the Doris sink table, then stream the aggregate into it.
        createDorisSink(streamTableEnvironment);
        streamTableEnvironment.executeSql(
                "insert into dws_traffic_source_keyword_page_view_window select * from result_table");
    }

    /** Declares the Kafka-backed DWD page-view table with an event-time watermark. */
    private void createPageViewSource(StreamTableEnvironment tEnv) {
        tEnv.executeSql(
            "create table dwd_traffic_page (\n" +
                        "`common` MAP<STRING, STRING>,\n" +
                        "`page` MAP<STRING, STRING>,\n" +
                        "`ts` BIGINT,\n" +
                        // ts is epoch millis; /1000 yields whole seconds, matching
                        // TO_TIMESTAMP_LTZ precision 0. Windowing needs a TIMESTAMP column.
                        "`et` AS TO_TIMESTAMP_LTZ(ts/1000, 0),\n" +
                        "WATERMARK FOR et AS et - INTERVAL '0' SECOND\n" +
                        ")" + SQLUtil.getKafkaProperty("dwd_traffic_page", "DwsTrafficSourceKeywordPageViewWindow")
        );
    }

    /** Filters keyword-search page views into the temporary view {@code filtered_table}. */
    private void registerFilteredView(StreamTableEnvironment tEnv) {
        Table filteredTable = tEnv.sqlQuery(
            "select\n " +
                    "page['item'] as full_word, " +
                    "et " +
                    "from dwd_traffic_page " +
                    "where page['item_type'] = 'keyword' " +
                    "and page['item'] is not null"
        );
        tEnv.createTemporaryView("filtered_table", filteredTable);
    }

    /**
     * Splits each search phrase via the {@code kw_analyze} UDTF (lateral join);
     * registers the result as the temporary view {@code keyword_table}.
     */
    private void registerKeywordView(StreamTableEnvironment tEnv) {
        Table keywordTable = tEnv.sqlQuery(
            "select " +
                    "keyword, " +
                    "et " +
                    "from filtered_table, lateral table(kw_analyze(full_word)) explode_table(keyword)"
        );
        tEnv.createTemporaryView("keyword_table", keywordTable);
    }

    /** 10-second tumbling-window keyword counts (TVF); registered as {@code result_table}. */
    private void registerWindowAggregateView(StreamTableEnvironment tEnv) {
        Table result = tEnv.sqlQuery(
            "select\n " +
                    "date_format(window_start, 'yyyy-MM-dd HH:mm:ss') as stt,\n " +
                    "date_format(window_end, 'yyyy-MM-dd HH:mm:ss') as edt,\n " +
                    "date_format(window_start, 'yyyy-MM-dd') as cur_date,\n " +
                    // Constant source tag: every row here originated from a search page view.
                    "'SEARCH' as source,\n " +
                    "keyword,\n " +
                    "count(*) as keyword_count\n " +
                    "from\n " +
                    "TABLE (\n" +
                    "TUMBLE(TABLE keyword_table, DESCRIPTOR(et), INTERVAL '10' SECOND)\n" +
                    ") \n" +
                    "GROUP BY window_start, window_end, keyword"
        );
        tEnv.createTemporaryView("result_table", result);
    }

    /**
     * Declares the Doris sink table. Column order must match {@code result_table},
     * because the final statement uses {@code insert into ... select *}.
     */
    private void createDorisSink(StreamTableEnvironment tEnv) {
        tEnv.executeSql(
            "create table dws_traffic_source_keyword_page_view_window (\n" +
                    "`stt` STRING,\n " +
                    "`edt` STRING,\n " +
                    "`cur_date` STRING,\n " +
                    "`source` STRING,\n " +
                    "`keyword` STRING,\n" +
                    "`keyword_count` BIGINT\n" +
                    ") \n" + SQLUtil.getDorisProperty("dws_traffic_source_keyword_page_view_window")
        );
    }
}
