package com.atguigu.app.dws;

import com.atguigu.app.func.SplitFunction;
import com.atguigu.bean.KeywordStats;
import com.atguigu.utils.MyClickHouseUtil;
import com.atguigu.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class KeyWordStats10sApp {

    /**
     * Flink streaming job: counts search keywords from the {@code dwd_page_log}
     * Kafka topic in 10-second event-time tumbling windows and writes the
     * aggregates to ClickHouse.
     */
    public static void main(String[] args) throws Exception {
        // Single-parallelism streaming environment bridged to the Table API.
        StreamExecutionEnvironment executionEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        executionEnv.setParallelism(1);
        StreamTableEnvironment tblEnv = StreamTableEnvironment.create(executionEnv);

        // Kafka source table: event-time column `rt` is computed from the
        // epoch-millis `ts` field, with a 2-second out-of-orderness watermark.
        tblEnv.executeSql("create table page_log( " +
                "    `page` Map<String, String>, " +
                "    `ts` bigint, " +
                "    `rt` as to_timestamp(from_unixtime(ts/1000)), " +
                "    watermark for rt as rt - interval '2' second " +
                ")" + MyKafkaUtil.getKafkaDDL("dwd_page_log", "keyword_stats_10s_app_2022"));

        // Keep only search-page records that actually carry a keyword.
        tblEnv.createTemporaryView("key_word_table", tblEnv.sqlQuery("select " +
                "    page['item'] key_word, " +
                "    rt " +
                "from page_log " +
                "where  page['last_page_id'] = 'search' " +
                "and page['item'] is not null"));

        // Register the UDTF that splits a search phrase into words, then
        // explode each keyword row into one row per word via a lateral join.
        tblEnv.createTemporarySystemFunction("SplitFunction", SplitFunction.class);
        tblEnv.createTemporaryView("word_table", tblEnv.sqlQuery("select " +
                "    word, " +
                "    rt " +
                "from key_word_table, " +
                "lateral table(SplitFunction(key_word))"));

        // Per-word counts in a 10-second tumbling window; window bounds are
        // formatted as strings and `ts` captures the emission wall-clock time.
        Table keywordStatsTable = tblEnv.sqlQuery("select " +
                "    'search' source, " +
                "    date_format(TUMBLE_START(rt, interval '10' second), 'yyyy-MM-dd HH:mm:ss') stt, " +
                "    date_format(TUMBLE_END(rt, interval '10' second), 'yyyy-MM-dd HH:mm:ss') edt, " +
                "    word keyword, " +
                "    count(*) ct, " +
                "    unix_timestamp()*1000 ts " +
                "from word_table " +
                "group by word, " +
                "TUMBLE(rt, interval '10' second)");

        // A tumbling-window aggregation over an append-only source emits only
        // inserts, so converting to an append stream is valid here.
        DataStream<KeywordStats> statsStream = tblEnv.toAppendStream(keywordStatsTable, KeywordStats.class);
        statsStream.print(">>>>>>>");

        // Sink the aggregates into ClickHouse with a parameterized insert.
        statsStream
                .addSink(MyClickHouseUtil
                        .getClickHouseSink("insert into dws_search_keyword_10s_2022(word,ct,source,stt,edt,ts) values(?,?,?,?,?,?)"));

        executionEnv.execute();
    }
}
