package realtime.app.dws;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import realtime.bean.KeywordBean;
import realtime.func.SplitFunction;
import realtime.util.ClickHouseUtil;
import realtime.util.MyKafkaUtil;

/**
 * DWS job: counts search keywords from the DWD page-log topic in 10-second
 * event-time tumbling windows and writes the per-window counts to ClickHouse.
 *
 * Pipeline: Kafka (dwd_traffic_page_log) -> filter search events -> tokenize
 * with a UDTF -> windowed count -> ClickHouse (dws_traffic_keyword_page_view_window).
 */
public class Dws01Traffic_Keyword_PageViewWindow {

    public static void main(String[] args) throws Exception {
        // TODO 1. Set up the stream execution environment and its Table API bridge.
        StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        execEnv.setParallelism(1);
        StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(execEnv);

        // Checkpointing and the state backend are left disabled for local runs;
        // re-enable the lines below for a production deployment:
//        execEnv.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//        execEnv.getCheckpointConfig().setCheckpointTimeout(10000L);
//        execEnv.setStateBackend(new HashMapStateBackend());
//        execEnv.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/flinkcdc/220926");

        // HDFS user needed by the checkpoint storage above:
        //System.setProperty("HADOOP_USER_NAME", "atguigu");


        // TODO 2. Read the page-log topic with Flink SQL. An event-time attribute
        // (rt) is derived from the epoch-millis ts column, and a 2-second watermark
        // is declared on it so the event-time tumbling window in step 6 can fire.
        String pageLogTopic = "dwd_traffic_page_log";
        String consumerGroup = "keyword_220926";
        streamTableEnv.executeSql(
                "create table page_log(\n"
                        + "    `common` MAP<STRING,STRING>,\n"
                        + "    `page` MAP<STRING,STRING>,\n"
                        + "    `ts` BIGINT,\n"
                        + "    rt AS TO_TIMESTAMP_LTZ(ts,3),\n"
                        + "    WATERMARK FOR rt AS rt - INTERVAL '2' SECOND\n"
                        + ")"
                        + MyKafkaUtil.getKafkaDDL(pageLogTopic, consumerGroup));

//        streamTableEnv.sqlQuery("select * from page_log").execute().print();

        // TODO 3. Keep only search behaviour. A record counts as a search when all
        // three hold:
        //   (1) page['item'] is not null;
        //   (2) page['last_page_id'] equals 'search';
        //   (3) page['item_type'] equals 'keyword'.
        Table searchEvents = streamTableEnv.sqlQuery(
                "select\n"
                        + "    `page`['item'] item,\n"
                        + "    rt\n"
                        + "from page_log\n"
                        + "where `page`['last_page_id'] = 'search'\n"
                        + "and `page`['item_type'] = 'keyword'\n"
                        + "and `page`['item'] is not null");
        streamTableEnv.createTemporaryView("filter_table", searchEvents);

        // TODO 4. Register the tokenizer UDTF (one row in, many rows out; the
        // number of output columns is independent of the input).
        streamTableEnv.createTemporarySystemFunction("SplitFunction", SplitFunction.class);

        // TODO 5. Explode each search phrase into individual tokens. `word` is the
        // column emitted by SplitFunction; the LATERAL TABLE join syntax differs
        // from HiveQL — see the Flink SQL documentation.
        Table tokenized = streamTableEnv.sqlQuery(
                "SELECT \n"
                        + "    rt, \n"        // event-time attribute, used for windowing below
                        + "    word\n"
                        + "FROM filter_table, \n"
                        + "LATERAL TABLE(SplitFunction(item))");
        streamTableEnv.createTemporaryView("split_table", tokenized);

        // TODO 6. Count each keyword inside a 10-second tumbling window (window
        // TVF). `ts` here is a wall-clock stamp attached to every output row.
        Table aggregated = streamTableEnv.sqlQuery(
                "SELECT\n"
                        + "    date_format(window_start,'yyyy-MM-dd HH:mm:ss') stt,\n"
                        + "    date_format(window_end,'yyyy-MM-dd HH:mm:ss') edt,\n"
                        + "    word keyword,\n"
                        + "    count(*) keyword_count,\n"
                        + "    UNIX_TIMESTAMP() ts\n"
                        + "FROM TABLE( \n"
                        + "    TUMBLE(TABLE split_table, DESCRIPTOR(rt), INTERVAL '10' SECONDS))\n"
                        + "GROUP BY word,window_start, window_end");
        streamTableEnv.createTemporaryView("result_table", aggregated);

        // TODO 7. Convert the (append-only) windowed result into a DataStream of
        // KeywordBean and sink it to ClickHouse.
        DataStream<KeywordBean> keywordStream =
                streamTableEnv.toAppendStream(aggregated, KeywordBean.class);
        keywordStream.print(">>>>>>>>>>>>>>>");

        keywordStream.addSink(ClickHouseUtil.getSinkFunction("insert into dws_traffic_keyword_page_view_window values(?,?,?,?,?)"));

        // TODO 8. Submit the job.
        execEnv.execute("Dws01Traffic_Keyword_PageViewWindow");
    }
}
