package com.atguigu.app.dws;
// Traffic domain: search-keyword-granularity page-view window summary table (DWS)

import com.atguigu.bean.KeywordBean;
import com.atguigu.func.SplitFunction;
import com.atguigu.utils.KafkaUtil;
import com.atguigu.utils.KeyWordUtil;
import com.atguigu.utils.MyClickHouseUtil;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

// Data flow: web/app -> logfile -> flume -> Kafka(ODS) -> FlinkApp -> Kafka(DWD) -> FlinkApp -> ClickHouse(DWS)
// Programs : Mock -> logfile -> f1.sh -> Kafka(ZK) -> BaseLogApp -> Kafka(ZK) -> DwsTrafficKeywordPageViewWindow -> ClickHouse(ZK)

//todo 1. Obtain the execution environment
//todo 2. Read the page-log topic with Flink SQL into a dynamic table (note: extract the event time)
//todo 3. Filter out the search records we need
//todo 4. Register the UDTF and use it to split the keyword phrase into words
//todo 5. Group, window, and aggregate
//todo 6. Convert the dynamic table into a DataStream
//todo 7. Write the data out to ClickHouse
//todo 8. Submit the job
/**
 * Traffic domain: keyword-granularity page-view counts per tumbling window.
 *
 * <p>Pipeline: DWD Kafka page-log topic -> filter search records -> UDTF word split
 * -> 10-second tumbling event-time window count -> ClickHouse DWS table.
 */
public class Dws01TrafficKeywordPageViewWindow {
    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming and table execution environments.
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.setParallelism(1);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(streamEnv);

        // Checkpointing is mandatory in production but is commented out here so that
        // local test runs do not need a running HDFS cluster. When enabled, restart
        // from the latest checkpoint/savepoint.
//        // Checkpoint every 3s with exactly-once semantics (aligned barriers by default).
//        streamEnv.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        // A single checkpoint attempt times out after 1 minute; a failed attempt is
//        // simply dropped and the next one proceeds.
//        streamEnv.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        // At least 3s between the end of one checkpoint and the start of the next.
//        streamEnv.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        // Keep the last checkpoint when the job is cancelled.
//        streamEnv.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        // Restart strategy: at most 3 failures per day, 1 minute between restarts.
//        streamEnv.setRestartStrategy(RestartStrategies.failureRateRestart(
//                3, Time.days(1L), Time.minutes(1L)));
//        // Local state lives in a hash-map backend; checkpoints are stored on HDFS.
//        streamEnv.setStateBackend(new HashMapStateBackend());
//        streamEnv.getCheckpointConfig().setCheckpointStorage(
//                "hdfs://hadoop102:8020/flinkCDC/220828");
//        // User for HDFS access.
//        System.setProperty("HADOOP_USER_NAME", "atguigu");

        /*
         * DWD page-log samples (displays/actions already stripped upstream):
         * {"common":{...},"page":{"during_time":10266,"last_page_id":"home","page_id":"search"},"ts":1651303984000}
         * {"common":{...},"page":{"during_time":10267,"item":"苹果手机","item_type":"keyword","last_page_id":"search","page_id":"good_list"},"ts":1651303984000}
         */
        // 2. Declare a Kafka-backed dynamic table over the DWD page-log topic.
        //    rt is the event time derived from ts; the watermark lags it by 2 seconds
        //    (event-time windowing requires both to be declared in the DDL).
        String sourceTopic = "dwd_traffic_page_log";
        String consumerGroup = "keyword_page_view_220828";

        String pageLogDdl = String.join("\n",
                "create table page_log(",
                // The `common` map is not needed downstream, so it is omitted here.
                " page map<string,string>,",
                " ts bigint,",
                " rt as TO_TIMESTAMP_LTZ(ts, 3),",
                " watermark for rt as rt-INTERVAL '2' SECOND",
                ")") + KafkaUtil.getKafkaDDL(sourceTopic, consumerGroup);
        tEnv.executeSql(pageLogDdl);

        // 3. Keep only search records: the previous page was the search page, the item
        //    type is a keyword, and the keyword is non-null (it will be tokenized, so a
        //    null would indicate data loss). last_page_id alone would likely suffice,
        //    but the extra predicates are a cheap safety margin.
        Table searchTable = tEnv.sqlQuery(String.join("\n",
                "select",
                "page['item'] item,",
                // Event time must be carried along for the windowing step.
                "rt",
                "from page_log ",
                "where page['last_page_id']='search'",
                "and page['item_type']='keyword'",
                "and page['item'] is not null"));
        tEnv.createTemporaryView("filter_table", searchTable);

        // 4. Register the custom UDTF and explode each keyword phrase into words.
        //    `word` is the split token declared via @FunctionHint(output = @DataTypeHint("ROW<word STRING>")).
        tEnv.createTemporaryFunction("splitfunction", SplitFunction.class);
        Table wordTable = tEnv.sqlQuery(String.join("\n",
                "select",
                "word,",
                "item,",
                "rt",
                "from filter_table,lateral table(splitfunction(item))"));
        tEnv.createTemporaryView("after_split_table", wordTable);

        // 5. Count each word per 10-second tumbling event-time window. The column
        //    aliases must match the KeywordBean field names or the conversion to a
        //    typed stream below will fail. ts (wall-clock at emit time) serves as the
        //    ClickHouse version column: on replay after a failure, ClickHouse keeps
        //    the newest version per ORDER BY key (last row wins on ties).
        Table countTable = tEnv.sqlQuery(String.join("\n",
                "select",
                "   date_format(tumble_start(rt,INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') stt,",
                "   date_format(tumble_end(rt,INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') edt,",
                "   word keyword,",
                "   count(*) keyword_count,",
                "   UNIX_TIMESTAMP() ts",
                "from after_split_table",
                "group by word,",
                "tumble(rt,INTERVAL '10' SECOND)"));

        tEnv.createTemporaryView("result_table", countTable);

        // 6. Convert the append-only windowed result into a KeywordBean stream so the
        //    sink can map bean fields onto the insert statement's placeholders.
        DataStream<KeywordBean> keywordStream = tEnv.toAppendStream(countTable, KeywordBean.class);
        keywordStream.print("要写入clickhouse的数据");

        // 7. Write the aggregated rows out to the ClickHouse DWS table.
        keywordStream.addSink(MyClickHouseUtil.getSinkFunction("insert into dws_traffic_keyword_page_view_window values(?,?,?,?,?)"));

        // 8. Submit the job.
        streamEnv.execute("DwsTrafficKeywordPageViewWindow");

    }
}
