package com.zhang.gmall.app.dws;

import com.zhang.gmall.app.func.SplitWordFunc;
import com.zhang.gmall.beans.KeywordStats;
import com.zhang.gmall.utils.ClickHouseUtil;
import com.zhang.gmall.utils.KafkaUtil;
import com.zhang.gmall.utils.MyEnv;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @title: 搜索关键词统计
 * @author: zhang
 * @date: 2022/3/26 21:56
 */
/**
 * DWS job: counts search keywords in 10-second tumbling event-time windows and
 * writes the aggregates to ClickHouse.
 *
 * <p>Pipeline: Kafka {@code dwd_page_log} -> filter search pages -> UDTF word
 * split -> tumble-window count -> ClickHouse {@code dws_search_keyword_10s}.
 */
public class KeywordStats10sApp {
    public static void main(String[] args) throws Exception {

        //TODO 1. Set up the streaming and table environments (parallelism 4, no checkpointing flag).
        StreamExecutionEnvironment env = MyEnv.getStreamingEnv(4, false);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        //TODO 2. Read the dwd_page_log topic from Kafka as a table.
        // `rt` is derived from the epoch-millis `ts` column; watermark allows 2s of lateness.
        tableEnv.executeSql("" +
                "create table page_log (" +
                "       `page` map<string,string>," +
                "       `ts` bigint," +
                "       `rt` as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000))," +
                "       WATERMARK FOR rt AS rt - INTERVAL '2' SECOND " +
                ")" + KafkaUtil.getKafkaDDL("dwd_page_log", "KeywordStats10sApp"));
       /* Table table = tableEnv.sqlQuery("select * from page_log");
        tableEnv.toChangelogStream(table).print();*/

        //TODO 3. Keep only rows that came from the search page and carry a keyword.
        // NOTE: a space is required before "and" — without it the concatenated SQL
        // reads "'search'and", which is not valid.
        Table keyWordTable = tableEnv.sqlQuery("" +
                "select " +
                "page['item'] key_word," +
                "rt " +
                "from page_log " +
                "where page['last_page_id'] = 'search' " +
                "and page['item'] is not null");
        tableEnv.createTemporaryView("key_word", keyWordTable);
        //tableEnv.toChangelogStream(keyWordTable).print();

        //TODO 4. Register the custom word-splitting function.
        tableEnv.createTemporarySystemFunction("splitFunction", SplitWordFunc.class);

        //TODO 5. Apply the UDTF: explode each keyword phrase into individual words.
        Table wordTable = tableEnv.sqlQuery("" +
                "select " +
                "   word ," +
                "   rt " +
                "from key_word, " +
                "LATERAL TABLE(splitFunction(key_word)) t(word)");
        tableEnv.createTemporaryView("word_table", wordTable);
        //tableEnv.toChangelogStream(wordTable).print();

        //TODO 6. Word-frequency aggregation: group by word over 10s tumbling windows.
        // `source` is hard-coded to 'search'; `ts` records the processing time of the row.
        Table resultTable = tableEnv.sqlQuery("" +
                "select " +
                "       'search' source," +
                "       DATE_FORMAT(window_start,'yyyy-MM-dd HH:mm:ss') stt," +
                "       DATE_FORMAT(window_end,'yyyy-MM-dd HH:mm:ss') edt," +
                "       word keyword," +
                "       count(*) ct," +
                " UNIX_TIMESTAMP()*1000 ts " +
                " FROM TABLE (" +
                " TUMBLE( TABLE word_table," +
                " DESCRIPTOR(rt) ," +
                " INTERVAL '10' SECOND)) " +
                " group by word,window_start,window_end");
        // Debug sink — keep commented out in production, consistent with the prints above.
        //tableEnv.toChangelogStream(resultTable).print();

        //TODO 7. Convert the result table to an append-only DataStream of POJOs.
        // A tumbling-window aggregate is append-only, so toAppendStream is safe here.
        DataStream<KeywordStats> keywordStatsDS = tableEnv.toAppendStream(resultTable, KeywordStats.class);

        //TODO 8. Sink the aggregates into ClickHouse.
        keywordStatsDS
                .addSink(
                        ClickHouseUtil
                                .getClickHouseSink("insert into dws_search_keyword_10s(keyword,ct,source,stt,edt,ts) values(?,?,?,?,?,?)")
                );
        //TODO Launch the job.
        env.execute("KeywordStats10sApp");

    }
}
