package com.atguigu.edu.realtime.app.dws;

import com.atguigu.edu.realtime.app.func.KeywordUDTF;
import com.atguigu.edu.realtime.bean.KeywordBean;
import com.atguigu.edu.realtime.util.MyClickhouseUtil;
import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author:ray
 * @time:2022/10/15 15:31
 * @description:
 **/
public class DwsTrafficSourceKeywordPageViewWindow {

    /**
     * Job entry point. Pipeline: Kafka page log -> filter search events ->
     * split search phrases into keywords via the ik_analyze UDTF -> count per
     * keyword over 10-second tumbling event-time windows -> ClickHouse sink.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        // TODO 1. Set up the stream and table execution environments.
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.setParallelism(4);

        StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(streamEnv);
        // Register the word-segmentation table function under the SQL name "ik_analyze".
        streamTableEnv.createTemporarySystemFunction("ik_analyze", KeywordUDTF.class);

        // TODO 2. Checkpoint configuration (left disabled, presumably for local runs).
        /*// 2.1 enable checkpointing
        streamEnv.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        // 2.2 checkpoint timeout
        streamEnv.getCheckpointConfig().setCheckpointTimeout(60000L);
        // 2.3 keep externalized checkpoints after the job is cancelled
        streamEnv.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // 2.4 minimum pause between two checkpoints
        streamEnv.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        // 2.5 restart strategy
        streamEnv.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));
        // 2.6 state backend and checkpoint storage location
        streamEnv.setStateBackend(new HashMapStateBackend());
        streamEnv.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        // 2.7 hadoop user performing the checkpoint writes
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // TODO 3. Map the Kafka page_log topic to a dynamic table; derive an
        // event-time column from the epoch-millis field ts and declare a
        // watermark with 3 seconds of allowed out-of-orderness.
        // Sample record:
        /*{"common":{"sc":"1","ar":"20","uid":"2","os":"Android 11.0","ch":"xiaomi","is_new":"0","md":"Xiaomi 9","mid":"mid_350","vc":"v2.1.134","ba":"Xiaomi","sid":"2f7594ef-f3f1-4ab6-9ab8-84a93dbed169"},"page":{"page_id":"course_list","item":"多线程","during_time":19050,"item_type":"keyword","last_page_id":"home"},"ts":1665752371453}
         */
        streamTableEnv.executeSql(
                "create table page_log( " +
                        "  common map<string,string>, " +
                        "  page map<string,string>, " +
                        "  ts bigint, " +
                        "  rowtime as  TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)), " +
                        "  WATERMARK FOR rowtime AS rowtime - INTERVAL '3' SECOND )" +
                        MyKafkaUtil.getKafkaDDL("dwd_traffic_page_log", "dws_traffic_keyword_group"));

        // TODO 4. Keep only search behaviour: item_type = 'keyword' with a non-null item.
        Table searchBehaviourTable = streamTableEnv.sqlQuery(
                " select page['item'] fullword," +
                        " rowtime " +
                        " from page_log where page['item_type']='keyword' " +
                        " and page['item'] is not null");
        streamTableEnv.createTemporaryView("search_table", searchBehaviourTable);

        // TODO 5. Explode every search phrase into individual keywords with the
        // UDTF, carrying the event time along with each keyword row.
        Table keywordTable = streamTableEnv.sqlQuery(
                "select keyword, rowtime from search_table,lateral table(ik_analyze(fullword)) t(keyword)");
        streamTableEnv.createTemporaryView("split_table", keywordTable);

        // TODO 6. Group by keyword, apply a 10-second tumbling window, and count.
        Table windowCountTable = streamTableEnv.sqlQuery("select " +
                "DATE_FORMAT(TUMBLE_START(rowtime, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') stt,\n" +
                "DATE_FORMAT(TUMBLE_END(rowtime, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') edt,\n" +
                "keyword,\n" +
                "count(*) keyword_count,\n" +
                "UNIX_TIMESTAMP()*1000 ts\n" +
                "from split_table\n" +
                "GROUP BY TUMBLE(rowtime, INTERVAL '10' SECOND),keyword");

        // TODO 7. Convert the append-only result table back into a DataStream.
        DataStream<KeywordBean> keywordBeanStream = streamTableEnv.toAppendStream(windowCountTable, KeywordBean.class);
        keywordBeanStream.print(">>>>");

        // TODO 8. Write the windowed keyword counts into ClickHouse.
        keywordBeanStream.addSink(
                MyClickhouseUtil.getSinkFunction(
                        "insert into dws_traffic_source_keyword_page_view_window values(?,?,?,?,?)"));

        streamEnv.execute();
    }
}
