package Yanxin.atguigu.yx.app.App_04_DwsApp.Dws_01_Traffic;

import Yanxin.atguigu.yx.app.APP_01_BaseApp.BaseSqlApp;
import Yanxin.atguigu.yx.common.Constant;
import Yanxin.atguigu.yx.tablefuntion.IkAnalyzer;
import Yanxin.atguigu.yx.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS-layer traffic job: counts search keywords per tumbling window.
 *
 * <p>Pipeline: read page-log events from Kafka, keep only keyword searches
 * issued from the course-list page, tokenize each search phrase with the IK
 * analyzer UDTF, aggregate counts per 5-second tumbling event-time window,
 * and sink the results into a Doris table.
 */
public class Dws_04_Traffic_keyword_Count extends BaseSqlApp {

    public static void main(String[] args) {
        // port 4004, parallelism 2, job name / consumer group id
        new Dws_04_Traffic_keyword_Count().init(
                4004,
                2,
                "Dws_04_Traffic_keyword_Count"
        );
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {

        // 1. Register the Kafka DWD page-log topic as a Flink SQL source table.
        //    `et` is the event-time column derived from the epoch-millis `ts`,
        //    with a 3-second out-of-orderness watermark.
        tEnv.executeSql("create table page(" +
                "page map<string,string>," +
                "ts bigint," +
                "et as to_timestamp_ltz(ts,3)," +
                "watermark for et as et - interval '3' second" +
                ")" + SQLUtil.getKafkaSource(Constant.TOPIC_DWD_TRAFFIC_PAGE,"Dws_04_Traffic_keyword_Count")
        );

        // 2. Extract the raw search keywords from the page table.
        /**
         * Sample input record:
         * {
         *   "common": {
         *     "sc": "1",
         *     "ar": "4",
         *     "uid": "3705",
         *     "os": "iOS 13.3.1",
         *     "ch": "Appstore",
         *     "is_new": "1",
         *     "md": "iPhone 8",
         *     "mid": "mid_34",
         *     "vc": "v2.1.132",
         *     "ba": "iPhone",
         *     "sid": "e645965e-ce42-4029-8dde-ac0e9abcec35"
         *   },
         *   "page": {
         *     "page_id": "course_list",
         *     "item": "前端",
         *     "during_time": 9669,
         *     "item_type": "keyword"
         *   },
         *   "ts": 1661937388274
         * }
         */
        // A search event is identified by item_type = 'keyword' on the
        // course_list page; `item` then holds the search phrase.
        Table rawKeywords = tEnv.sqlQuery("select " +
                " page['item'] keyword," +
                " et " +
                " from page " +
                " where `page`['item_type'] = 'keyword' " +
                " and `page`['page_id'] = 'course_list' " +
                " and `page`['item'] is not null "
        );
        tEnv.createTemporaryView("keyword_table", rawKeywords);

        // 3. Tokenize each search phrase with the IK analyzer so that
        //    multi-word phrases (e.g. "big data analysis") are counted per
        //    individual term. The UDTF emits one row per token in column `kw`.
        tEnv.createTemporaryFunction("ik_analyzer", IkAnalyzer.class);
        Table tokenized = tEnv.sqlQuery("select " +
                "kw," + // individual token produced by the analyzer
                "et" +
                " from keyword_table " +
                " join lateral table(ik_analyzer(keyword)) on true" // cross-apply the tokenizer to every row

        );
        tEnv.createTemporaryView("kw_split_table", tokenized);

        // 4. Tumbling-window aggregation: count occurrences of each token
        //    per 5-second event-time window.
        Table windowedCounts = tEnv.sqlQuery("select " +
                "date_format(window_start,'yyyy-MM-dd HH:mm:ss') stt, " +
                "date_format(window_end,'yyyy-MM-dd HH:mm:ss') edt, " +
                "'course_list' source, " +
                "kw keyword," +
                "date_format(now(),'yyyy-MM-dd') cur_date, " +
                "count(*) keyword_count " +
                " from table(tumble(table kw_split_table, descriptor(et), interval '5' second )) " +

                " group by window_start,window_end,kw  "
        );

        // 5. Declare the Doris sink table the aggregated rows are written to.
        tEnv.executeSql("create table kw_result(" +
                " stt string, " +
                " edt string, " +
                " source string, " +
                " keyword string, " +
                " cur_date string, " +
                " keyword_count bigint) with (" +

                " 'connector' = 'doris', " +
                " 'fenodes' = 'hadoop102:7030', " +
                " 'table.identifier' = 'edu.dws_traffic_source_keyword_page_view_window', " +
                "  'username' = 'root', " +
                "  'password' = '123456' " +
                ")"
        );

        // 6. Start the streaming insert into the Doris sink.
        windowedCounts.executeInsert("kw_result");
    }
}
