package com.atguigu.app.dws;

/**
 * @author hjy
 * @create 2023/3/14 16:44
 */

import com.atguigu.Func.SplitFunction;
import com.atguigu.Util.ClickHouseUtil;
import com.atguigu.Util.MyKafkaUtil;
import com.atguigu.bean.KeywordBean;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * 流量域搜索关键词粒度页面浏览各窗口汇总表
 * 数据流:web/app -> 日志服务器(file) -> flume -> Kafka(ODS) -> FlinkApp -> Kafka(DWD) -> FlinkApp -> ClickHouse(DWS)
 * 程序:Mock -> file -> f1.sh -> Kafka(ZK) -> BaseLogApp -> Kafka(ZK) -> Dws01TrafficKeywordPageViewWindow -> ClickHouse(ZK)
 */
public class Dws01_TrafficKeywordPageViewWindow {
    public static void main(String[] args) throws Exception {
        // todo 1: build the streaming environment and its Table API wrapper
        StreamExecutionEnvironment executionEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        executionEnv.setParallelism(1);
        StreamTableEnvironment tblEnv = StreamTableEnvironment.create(executionEnv);
        // Checkpointing / state backend left disabled for local runs; re-enable in production:
//        executionEnv.enableCheckpointing(5000L);
//        executionEnv.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall-flink/check");
//        executionEnv.getCheckpointConfig().setCheckpointTimeout(60000L);
//        executionEnv.setStateBackend(new HashMapStateBackend());
        // HDFS user identity (only needed when HDFS-backed checkpointing is on)
        //System.setProperty("HADOOP_USER_NAME", "atguigu");

        // todo 2: declare the Kafka source table over the DWD page-log topic,
        //         deriving an event-time column (rt) and a 2-second watermark from ts
        String sourceTopic = "dwd_traffic_page_log";
        String consumerGroup = "KeywordPage";
        String pageLogDdl = "create table page_log(\n" +
                "   `common` Map<STRING,STRING>,\n" +
                "   `page` MAP<STRING,STRING>,\n" +
                "   `ts` BIGINT,\n" +
                "   rt AS TO_TIMESTAMP_LTZ(ts, 3) ,\n" +
                "  WATERMARK FOR rt AS rt - INTERVAL '2' SECOND\n" +
                ")" + MyKafkaUtil.getKafkaDDL(sourceTopic, consumerGroup);
        tblEnv.executeSql(pageLogDdl);

        // todo 3: keep only search behaviour — rows that arrived from the search
        //         page, carry item_type 'keyword', and have a non-null search term
        String filterSql = "select \n" +
                " rt,\n" +
                " `page`['item'] item\n" +
                "from page_log\n" +
                "where `page`['last_page_id']='search'\n" +
                "and `page`['item_type']='keyword'\n" +
                "and `page`['item'] is not null";
        Table searchTable = tblEnv.sqlQuery(filterSql);
        tblEnv.createTemporaryView("filter_table", searchTable);

        // todo 4: register the custom UDTF (tokenizer backed by the IK analyzer)
        tblEnv.createTemporarySystemFunction("SplitFunction", SplitFunction.class);

        // todo 5: explode each search phrase into individual words via the UDTF
        String splitSql = "SELECT \n" +
                "  rt,\n" +
                "  word\n" +
                "  FROM filter_table, LATERAL TABLE(SplitFunction(item))";
        Table wordTable = tblEnv.sqlQuery(splitSql);
        tblEnv.createTemporaryView("split_table", wordTable);

        // todo 6: count each keyword inside 10-second tumbling event-time windows
        String aggSql = "SELECT \n" +
                "   date_format(window_start,'yyyy-MM-dd HH:mm:ss') stt,\n" +
                "   date_format(window_end,'yyyy-MM-dd HH:mm:ss') edt,\n" +
                "   word keyword, \n" +
                "   count(*) keyword_count,\n" +
                "   UNIX_TIMESTAMP() ts\n" +
                "FROM TABLE(\n" +
                "    TUMBLE(TABLE split_table, DESCRIPTOR(rt), INTERVAL '10' SECONDS))\n" +
                "GROUP BY word,window_start, window_end";
        Table windowCountTable = tblEnv.sqlQuery(aggSql);
        tblEnv.createTemporaryView("result_table", windowCountTable);

        // todo 7: convert the window result back into a DataStream of KeywordBean
        //         and write it to ClickHouse through the shared JDBC sink helper
        //         (the helper is written generically so other DWS jobs can reuse it)
        DataStream<KeywordBean> keywordStream = tblEnv.toAppendStream(windowCountTable, KeywordBean.class);
        keywordStream.print(">>>>>>>>>>>>>>>>>>>>>>>>");
        keywordStream.addSink(ClickHouseUtil.getSinkFunction("insert into dws_traffic_keyword_page_view_window values(?,?,?,?,?)"));

        // todo 8: launch the job
        executionEnv.execute("Dws01_TrafficKeywordPageViewWindow");
    }
}
