package com.zy.gmall.realtime.app.dws;

import com.zy.gmall.realtime.app.func.KeywordUDTF;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

// Traffic domain: search-keyword word segmentation implemented with Flink SQL
/**
 * DWS traffic-domain job: extracts search keywords from page-log events,
 * splits them with a custom UDTF, counts each word in tumbling event-time
 * windows, and writes the result to Doris
 * (gmall.dws_traffic_source_keyword_page_view_window).
 *
 * <p>Pipeline (pure Flink SQL): Kafka topic_pages -&gt; filter search events
 * -&gt; LATERAL TABLE word split -&gt; 3s TUMBLE count -&gt; Doris sink.
 */
public class DwsTrafficSourceKeywordPageViewWindow{
    public static void main(String[] args) {
        //1 Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);// table environment layered on the stream env

        //2 Enable checkpointing (every 50s) and a fixed-delay restart strategy (3 retries, 3s apart).
        //   (Original comment said "savepoint"; this actually configures checkpoints.)
        env.enableCheckpointing(50000L);
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3,3000L));
        //3 Define the upstream Kafka source via SQL DDL.
        //   NOTE(review): during testing no data was consumed — the upstream app was not running;
        //   verify producers are up before debugging the SQL itself.
        //Topic: topic_pages. Sample record:
        //{"common":{"ar":"13","uid":"46","os":"Android 13.0","ch":"wandoujia","is_new":"1","md":"OPPO Remo8","mid":"mid_498","vc":"v2.1.134","ba":"OPPO","sid":"bf03677e-8a09-45eb-9caa-7dd2e6a80ff9"},
        // "page":{"page_id":"trade","item":"14,19,20,1,4","during_time":12433,"item_type":"sku_ids","last_page_id":"cart"},
        // "ts":1654771816000}

        // The commented-out DDL below declared no row_time/watermark, but the downstream
        // windowed aggregation requires an event-time attribute — hence the version that follows.
//        tableEnv.executeSql("CREATE TABLE KafkaSource (\n" +
//                "  `common` MAP<STRING,STRING> ,\n" +
//                "  `page` MAP<STRING,STRING> ,\n" +
//                "   ts STRING )" + KafkaUtil.getKafkaSinkConnector("topic_pages","DwsTrafficSourceKeywordPageViewWindow"));
        tableEnv.executeSql("CREATE TABLE KafkaSource (\n" +
                "  `common` MAP<STRING,STRING>,\n" +
                "  `page` MAP<STRING,STRING>,\n" +
                "  ts bigint,\n" +
                // ts is epoch millis; convert to a TIMESTAMP for the event-time attribute
                "  row_time as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)),\n" +
                // strict watermark (zero allowed lateness): out-of-order events are dropped
                "  WATERMARK FOR row_time AS row_time\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'topic_pages',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092,hadoop103:9092,hadoop104:9092',\n" +
                "  'properties.group.id' = 'DwsTrafficSourceKeywordPageViewWindow',\n" +
                "  'scan.startup.mode' = 'latest-offset',\n" +
                "  'format' = 'json'\n" +
                ")");
        // tableEnv.executeSql("select * from KafkaSource").print()
        //4 Keep only keyword-search events: user came from the search page and the
        //  item is a keyword. (Original note flagged a field-name typo fixed here earlier.)
        Table searchTable = tableEnv.sqlQuery("select page['item'] fullword,row_time\n" +
                "from KafkaSource where page['last_page_id']='search' and page['item_type']='keyword' \n" +
                "and page['item'] is not null");
        tableEnv.createTemporaryView("search_table", searchTable);
        //tableEnv.executeSql("select * from search_table").print();

        //5 Word segmentation: explode each full search phrase into single words,
        //  joined back with the source row's event time.
        //  The custom UDTF must be registered before it can be used in SQL.
        tableEnv.createTemporarySystemFunction("eval", KeywordUDTF.class);
        Table bin_table = tableEnv.sqlQuery("SELECT word,row_time\n" +
                "FROM search_table,\n" +
                "LATERAL TABLE (eval(fullword)) t(word)");
        tableEnv.createTemporaryView("binTable",bin_table);
        //tableEnv.executeSql("select * from binTable").print();

        //6 Group, window, aggregate: count each word per 3-second tumbling window.
        Table resTable = tableEnv.sqlQuery(
                "SELECT window_start stt, " +
                "window_end edt," +
                "word keyword," +
                "DATE_FORMAT(window_start,'yyyyMMdd') cur_date," +
                "count(word) keyword_count\n" +
                "  FROM TABLE(\n" +
                // NOTE(review): event-time windows only close when the watermark advances;
                // with sparse test data a window may never close — confirm with live traffic.
                "    TUMBLE(TABLE binTable, DESCRIPTOR(row_time), INTERVAL '3' second))\n" +
                "  GROUP BY window_start, window_end,word");
        tableEnv.createTemporaryView("resTable",resTable);
        //tableEnv.executeSql("select * from resTable").print();
        //7 Sink the windowed counts into Doris. executeSql on INSERT submits the job,
        //  so no explicit env.execute() is needed for this pure-SQL pipeline.
        tableEnv.executeSql("CREATE TABLE flink_doris_sink (\n" +
                "    stt TIMESTAMP(3),\n" +
                "    edt TIMESTAMP(3),\n" +
                "    keyword STRING,\n" +
                "    cur_date STRING,\n" +
                "    keyword_count BIGINT\n" +
                "    ) \n" +
                "    WITH (\n" +
                "      'connector' = 'doris',\n" +
                "      'fenodes' = 'hadoop102:7030',\n" +
                "      'table.identifier' = 'gmall.dws_traffic_source_keyword_page_view_window',\n" +
                "      'username' = 'root',\n" +
                "      'password' = '000000',\n" +
// with two-phase commit disabled below, a stream-load label prefix is not required
//                "      'sink.label-prefix' = 'doris_label',\n" +
                "       'sink.properties.format' = 'json',\n" +
                "       'sink.properties.read_json_by_line' = 'true',\n" +
                "       'sink.buffer-count' = '4', " +
                "       'sink.buffer-size' = '4086', " +
                // 2pc disabled: at-least-once delivery into Doris (duplicates possible on restart)
                "       'sink.enable-2pc' = 'false'\n" +
                ")");

        tableEnv.executeSql("insert into flink_doris_sink select * from resTable");

    }
}
