package com.atguigu.actual.edu0417.app.dws;

import com.atguigu.actual.edu0417.app.SqlBaseApp;
import com.atguigu.actual.edu0417.beans.KeywordUDTF;
import com.atguigu.actual.edu0417.common.EduConfig;
import com.atguigu.actual.edu0417.unils.MyKafkaUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS layer: search-keyword page-view statistics.
 *
 * <p>Reads page logs from the DWD Kafka topic, filters keyword searches on the
 * course-list page, splits each search phrase into keywords with a custom UDTF,
 * counts keywords per 10-second tumbling event-time window, and writes the
 * result to Doris through its MySQL-protocol FE port via the JDBC connector.
 *
 * @author 洛尘
 * @since 2023-10-21 09:59
 **/
public class DwsTrafficSourceKeywordPageViewWindow extends SqlBaseApp {

    /** Entry point: environment setup (table env, checkpoints, …) is handled by {@link SqlBaseApp}. */
    public static void main(String[] args) {
        DwsTrafficSourceKeywordPageViewWindow dwsTrafficSourceKeywordPageViewWindow = new DwsTrafficSourceKeywordPageViewWindow();
        dwsTrafficSourceKeywordPageViewWindow.base(args);
    }

    /** Business logic: keyword extraction and windowed aggregation. */
    @Override
    public void process(StreamTableEnvironment tableEnv) {
        // Register the word-segmentation table function before it is referenced in SQL.
        tableEnv.createTemporarySystemFunction("ik_analyze", KeywordUDTF.class);

        // Source: page-log topic from the DWD layer. The event-time attribute
        // `rowtime` is derived from the epoch-millis `ts` column.
        String topic = "dwd_traffic_page_log";
        String groupID = "dws_traffic_keyword_group";
        tableEnv.executeSql("create table page_log(" +
                "common map<string,string>," +
                "page map<string,string>," +
                "ts BIGINT,rowtime as TO_TIMESTAMP_LTZ(ts,3)," +
                "watermark for rowtime as rowtime)" + MyKafkaUtil.getKafkaDDL(topic, groupID));

        // Keep only non-null keyword searches issued from the course-list page.
        Table searchTable = tableEnv.sqlQuery("select \n" +
                "page['item'] fullword,\n" +
                "rowtime \n" +
                "from page_log \n" +
                "where \n" +
                "page['page_id']='course_list' " +
                "and page['item_type']='keyword' and page['item'] is not null");
        tableEnv.createTemporaryView("search_table", searchTable);

        // Explode each search phrase into individual keywords via a lateral
        // join against the UDTF, carrying `rowtime` along for windowing.
        Table joinTable = tableEnv.sqlQuery("select\n" +
                "keyword,\n" +
                "rowtime\n" +
                "from search_table,\n" +
                "lateral table(ik_analyze(fullword)) t(keyword)");
        tableEnv.createTemporaryView("join_table", joinTable);

        // 10-second tumbling-window count per keyword.
        // BUGFIX: the original also grouped by `rowtime`, which made every row
        // its own group and defeated the count(*) aggregation. `cur_date` is
        // now derived from window_start (rowtime is no longer a group key).
        // BUGFIX: removed the trailing ';' which Flink's SQL parser rejects.
        Table resultTable = tableEnv.sqlQuery("SELECT \n" +
                "                date_format(window_start,'yyyy-MM-dd HH:mm:ss') stt, \n" +
                "                date_format(window_end,'yyyy-MM-dd HH:mm:ss') edt, \n" +
                "                keyword,\n" +
                "                date_format(window_start,'yyyy-MM-dd') cur_date,\n" +
                "                count(*) keyword_count\n" +
                "                FROM TABLE(\n" +
                "                TUMBLE(TABLE join_table, DESCRIPTOR(rowtime), INTERVAL '10' second))\n" +
                "                GROUP BY window_start, window_end, keyword");
        tableEnv.createTemporaryView("result_table", resultTable);

        // Sink: Doris, reached through its MySQL-protocol FE port (9030) with
        // the generic JDBC connector. Column order must match result_table.
        // NOTE(review): credentials are hard-coded — move them into EduConfig.
        // BUGFIX: removed the trailing ';' which Flink's SQL parser rejects.
        tableEnv.executeSql(" create table t2 (\n" +
                "stt string,\n" +
                "edt string,\n" +
                "keyword string,\n" +
                "cur_date string,\n" +
                "keyword_count bigint\n" +
                ") WITH (\n" +
                "      'connector' = 'jdbc',\n" +
                "      'url' = 'jdbc:mysql://hadoop101:9030/edu'," +
                "      'username' = 'root',\n" +
                "      'password' = 'aaaaaa',\n" +
                "      'table-name' = 'dws_traffic_source_keyword_page_view_window'\n" +
                ")");

        // Submit the continuous insert job from the aggregate view to Doris.
        tableEnv.executeSql("insert into t2 select * from result_table");
    }
}