package com.atguigu.education.app.dws;

import com.atguigu.education.app.func.KeywordUDTF;
import com.atguigu.education.bean.KeywordBean;
import com.atguigu.education.common.GmallConstant;
import com.atguigu.education.util.ClickHouseUtil;
import com.atguigu.education.util.KafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Keyword statistics job.
 *
 * <p>Reads page-view logs from the Kafka topic {@code dwd_traffic_page_log},
 * filters search-keyword events, splits each search phrase into words with the
 * {@link KeywordUDTF} table function, counts occurrences per word in 10-second
 * tumbling event-time windows, and writes the results to ClickHouse.
 */
public class DwsTrafficKeywordWindow {
    public static void main(String[] args) throws Exception {
        //TODO 1. Obtain the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Parallelism 1: dev/test setting so ordering and output are easy to inspect;
        // raise for production throughput.
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        //TODO 2 Checkpointing and state backend (left disabled here; enable for production)
        /*
        // Checkpoint every 5 minutes with exactly-once semantics
        env.enableCheckpointing(5*60*1000L, CheckpointingMode.EXACTLY_ONCE);
        // Abort checkpoints that take longer than 3 minutes
        env.getCheckpointConfig().setCheckpointTimeout(3*60*1000L);
        // At most 2 checkpoints may be in flight concurrently
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        // Where checkpoint state is persisted
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        // HDFS checkpoint storage requires the Hadoop user name to be set
        System.setProperty("HADOOP_USER_NAME","atguigu");
        // Keep working state on the JVM heap
        env.setStateBackend(new HashMapStateBackend());
        */
        //TODO 3 Declare the dwd_traffic_page_log source table via SQL
        // Sample record for reference:
        // page>>>>>>>>> {"common":{"sc":"2","ar":"14","uid":"8","os":"Android 11.0","ch":"xiaomi","is_new":"0","md":"Xiaomi Mix2 ","mid":"mid_339","vc":"v2.0.1","ba":"Xiaomi","sid":"9ffe1423-3b66-4071-af0a-44c437e71faf"},"page":{"page_id":"exam","item":"196","during_time":1057000,"item_type":"paper_id","last_page_id":"course_detail"},"ts":1662208768378}

        final String pageTopic = "dwd_traffic_page_log";
        final String groupId = "dws_traffic_keyword_window";
        // rt is an event-time attribute derived from ts (epoch millis); the watermark
        // tolerates 2 seconds of out-of-orderness.
        tableEnv.executeSql("create table page_log(\n" +
                "   `common` map<string,string>,\n" +
                "   `page` map<string,string>,\n" +
                "   `ts` bigint, \n" +
                "    rt AS TO_TIMESTAMP_LTZ(ts, 3),  \n" +
                "   WATERMARK FOR  rt AS rt - INTERVAL '2' SECOND " +
                ")" + KafkaUtil.getKafkaDDL(pageTopic, groupId));

        // TODO 4 Keep only keyword-search events fired from the course-list page
        Table filterTable = tableEnv.sqlQuery("select \n" +
                "   `page`['item'] keyword,\n" +
                "    rt \n" +
                "from page_log\n" +
                "where `page`['item_type']='keyword'\n" +
                "and `page`['page_id']='course_list'\n" +
                "and `page`['item'] is not null");

        tableEnv.createTemporaryView("filter_table", filterTable);

        //TODO 5 Split each search phrase into individual words
        // Register the word-splitting table function
        tableEnv.createTemporaryFunction("analyze_keyword", KeywordUDTF.class);

        // LATERAL TABLE expands one input row into one row per extracted word
        Table wordTable = tableEnv.sqlQuery("select \n" +
                "    rt ,  \n" +
                "  word\n" +
                "FROM filter_table,\n" +
                "LATERAL TABLE(analyze_keyword(keyword))");
        tableEnv.createTemporaryView("word_table", wordTable);

        // TODO 6 Count words per 10-second tumbling event-time window
        Table countTable = tableEnv.sqlQuery("select \n" +
                "  DATE_FORMAT(TUMBLE_START(rt, interval '10' second), 'yyyy-MM-dd HH:mm:ss') stt,\n" +
                "  DATE_FORMAT(TUMBLE_END(rt, interval '10' second), 'yyyy-MM-dd HH:mm:ss')  edt,\n" +
                "  word keyword,\n" +
                "  count(*) score, \n " +
                "'" + GmallConstant.KEYWORD_PAGE + "'  page_id , \n " +
                "  UNIX_TIMESTAMP()*1000 ts \n " +
                "from word_table\n" +
                "group by word,\n" +
                "TUMBLE(rt, INTERVAL '10' SECOND)");
//        countTable.execute().print();

        //TODO 7 Convert the table to a DataStream of KeywordBean.
        // toDataStream replaces the deprecated toAppendStream (deprecated since
        // Flink 1.14); the tumbling-window aggregate is insert-only, so an
        // append-style conversion is valid, and columns map to bean fields by name.
        DataStream<KeywordBean> beanDataStream = tableEnv.toDataStream(countTable, KeywordBean.class);

        //TODO 8 Sink the aggregated rows to ClickHouse
        String sql = "insert into dws_traffic_keyword_page_view_window values(?,?,?,?,?,?)";
        beanDataStream.addSink(ClickHouseUtil.getClickHouseSinkFunc(sql));

        //TODO Execute the job (named for easier identification in the Flink UI)
        env.execute("DwsTrafficKeywordWindow");
    }
}
