package com.bw.app.dws;

import com.bw.app.functions.KeywordUDTF;
import com.bw.bean.KeywordStats;
import com.bw.common.GmallConstant;
import com.bw.utils.ClickHouseUtil;
import com.bw.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink job: keyword search statistics.
 *
 * <p>Reads page-log events from the {@code dwd_page_log} Kafka topic, keeps only
 * result-list page views that came from a search, tokenizes the search phrase with
 * the IK-analyzer UDTF, counts each keyword over 10-second tumbling event-time
 * windows, and writes the aggregates into the ClickHouse table
 * {@code keyword_stats_2105b}.
 */
public class KeywordStatsApp {
    public static void main(String[] args) throws Exception {
        // 1. Create the execution environment (parallelism 1 for deterministic local runs).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // The consumer group must be unique to this job: sharing a group id with
        // another job reading the same topic makes Kafka split the partitions
        // between them, so each job silently misses records.
        // (Was "province_stats_2105b" — apparently copy-pasted from ProvinceStatsApp.)
        String groupId = "keyword_stats_2105b";
        String pageViewSourceTopic = "dwd_page_log";

        // 2. Map the Kafka page-log topic onto a dynamic table.
        // Sample payloads (fields: common, page, ts):
        //   {"page_id":"good_list","item":"图书","during_time":7359,"item_type":"keyword","last_page_id":"search"}
        //   {"page_id":"good_detail","item":"5","during_time":19734,"item_type":"sku_id","last_page_id":"login","source_type":"query"}
        //   {"page_id":"home","during_time":18656}
        // Event time is derived from the epoch-millis ts field, with a 2-second
        // out-of-orderness bound on the watermark.
        tableEnv.executeSql(
                "CREATE TABLE page_view (" +
                        " common MAP<STRING, STRING>," +
                        " page MAP<STRING, STRING>," +
                        " ts BIGINT," +
                        " rowtime as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000,'yyyy-MM-dd HH:mm:ss'))," +
                        " WATERMARK FOR rowtime AS rowtime - INTERVAL '2' SECOND) " +
                        " WITH (" + MyKafkaUtil.getKafkaDDL(pageViewSourceTopic, groupId) +
                        ")");

        // Register the IK-analyzer tokenizer as a table function usable in SQL.
        tableEnv.createTemporarySystemFunction("ik_analyze", KeywordUDTF.class);

        // 3. Keep only result-list views that arrived from a search and carry a
        // search phrase. Yields rows shaped (fullword, rowtime), e.g. ("图书", <event time>).
        Table fullwordTable = tableEnv.sqlQuery(
                "select page['item'] fullword," +
                        "rowtime " +
                        " from page_view " +
                        " where page['page_id']='good_list' and page['item'] IS NOT NULL and page['last_page_id'] ='search' "
        );

        // Explode each search phrase into individual keywords via the UDTF —
        // one output row per token, event time preserved.
        Table keywordTable = tableEnv.sqlQuery(
                "SELECT keyword, rowtime " +
                        "FROM " + fullwordTable + "," +
                        "LATERAL TABLE(ik_analyze(fullword)) AS t(keyword)"
        );

        // Count each keyword per 10-second tumbling event-time window, tagging
        // rows with the SEARCH source constant and a processing-time timestamp.
        Table reduceTable = tableEnv.sqlQuery(
                "select keyword,count(*) ct, '"
                        + GmallConstant.KEYWORD_SEARCH + "' source," +
                        "DATE_FORMAT(TUMBLE_START(rowtime, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') stt," +
                        "DATE_FORMAT(TUMBLE_END(rowtime, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') edt ," +
                        "UNIX_TIMESTAMP()*1000 ts from " + keywordTable +
                        " group by TUMBLE(rowtime, INTERVAL '10' SECOND),keyword"
        );

        // 4. Convert the append-only result table back to a typed DataStream.
        DataStream<KeywordStats> keywordStatsDataStream = tableEnv.toAppendStream(reduceTable, KeywordStats.class);

        // 5. Sink to ClickHouse. The explicit column list must match the field
        // order of KeywordStats so the JDBC '?' placeholders bind correctly.
        keywordStatsDataStream.print(); // debug output; remove for production
        keywordStatsDataStream.addSink(ClickHouseUtil.sink(
                "insert into keyword_stats_2105b (keyword,ct,source,stt,edt,ts) values (?,?,?,?,?,?)"));

        // 6. Launch the job with an explicit name so it is identifiable in the Flink UI.
        env.execute("KeywordStatsApp");
    }
}
