package com.atguigu.edu.app.dws;

import com.atguigu.edu.app.func.UDTFKeywordsFunc;
import com.atguigu.edu.bean.KeywordBean;
import com.atguigu.edu.util.ClickHouseUtil;
import com.atguigu.edu.util.KafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS-layer Flink job: counts search keywords per 10-second tumbling window.
 *
 * <p>Pipeline: reads page-log events from the Kafka topic
 * {@code dwd_traffic_page_log}, keeps rows where {@code page['item_type'] = 'keyword'},
 * splits the raw keyword string with the {@link UDTFKeywordsFunc} table function,
 * aggregates counts per keyword per tumbling event-time window, and sinks the
 * resulting {@link KeywordBean} records into ClickHouse.
 */
public class DwsTrafficSourceKeywordPageViewWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1 Prepare the stream / table execution environments.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 for local testing only; raise for production.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 State backend / checkpointing.
        // Disabled for local testing; re-enable before deploying.
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
                 */

        // TODO 3 Register the page-log Kafka topic as a dynamic table.
        // Event time comes from the `ts` field (epoch millis) with a 2-second
        // bounded-out-of-orderness watermark.
        String pageTopic = "dwd_traffic_page_log";
        String groupId = "dws_traffic_source_keyword_page_view_window";
        tableEnv.executeSql("create table page_log(\n" +
                "  `common` map<STRING,STRING>,\n" +
                "  `page` map<STRING,STRING>,\n" +
                "  `ts` bigint,\n" +
                "  `rt` as TO_TIMESTAMP_LTZ(ts, 3) ,\n" +
                "  WATERMARK FOR rt as rt - INTERVAL '2' SECOND\n" +
                ")" + KafkaUtil.getKafkaDDL(pageTopic, groupId));

        // TODO 4 Keep only rows that carry a search keyword.
        Table keywordsTable = tableEnv.sqlQuery("select \n" +
                "  `page`['item'] keywords,\n" +
                "  `rt`\n" +
                "from page_log\n" +
                "where `page`['item_type']='keyword'");
        tableEnv.createTemporaryView("keywords_table", keywordsTable);

        // TODO 5 Split each raw keyword string into individual keywords.
        // Register the word-splitting UDTF, then cross-join with LATERAL TABLE
        // so one input row fans out to one row per extracted keyword.
        tableEnv.createTemporaryFunction("split_keywords", UDTFKeywordsFunc.class);
        Table keywordTable = tableEnv.sqlQuery("select\n" +
                "  keyword,\n" +
                "  rt\n" +
                "from keywords_table,\n" +
                "LATERAL TABLE(split_keywords(keywords))");
        tableEnv.createTemporaryView("keyword_table", keywordTable);

        // TODO 6 Count keyword occurrences per 10-second tumbling window.
        // NOTE(review): the constant '123' aliased `err` looks like leftover
        // test data — confirm it matches the intended KeywordBean field
        // (the DWS table usually carries a `source` column here).
        Table countsTable = tableEnv.sqlQuery("select\n" +
                "  DATE_FORMAT(TUMBLE_START(rt,INTERVAL '10' second),'yyyy-MM-dd HH:mm:ss') as stt,\n" +
                "  DATE_FORMAT(TUMBLE_END(rt,INTERVAL '10' second),'yyyy-MM-dd HH:mm:ss') as edt,\n" +
                "  '123' err,\n" +
                "  keyword,\n" +
                "  count(*) keyword_count,\n" +
                "  UNIX_TIMESTAMP()*1000 ts  \n" +
                "from keyword_table\n" +
                "group by TUMBLE(rt,INTERVAL '10' second), keyword");
        // NOTE(review): toAppendStream is deprecated since Flink 1.14 in favor
        // of tableEnv.toDataStream(table, KeywordBean.class) — switch when the
        // project's Flink version allows.
        DataStream<KeywordBean> keywordBeanDataStream = tableEnv.toAppendStream(countsTable, KeywordBean.class);

        // Debug output; remove or lower to a log statement in production.
        keywordBeanDataStream.print("bean >>>>");

        // TODO 7 Sink the aggregated beans into ClickHouse.
        // NOTE(review): the SELECT above produces 6 columns but this INSERT has
        // only 5 placeholders — verify against the ClickHouse table schema and
        // ClickHouseUtil's field mapping; a count mismatch will fail at runtime.
        keywordBeanDataStream.addSink(ClickHouseUtil.getClickHouseSink("insert into dws_traffic_source_keyword_page_view_window values(?,?,?,?,?)"));

        // TODO 8 Submit the job; the consumer group id doubles as the job name.
        env.execute(groupId);
    }
}
