package com.zhu.app.dws;

import com.zhu.bean.KeyWordBean;
import com.zhu.function.SplitFunction;
import com.zhu.utils.ClickHouseUtil;
import com.zhu.utils.ZhuKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS layer, traffic domain: keyword page-view window aggregation.
 *
 * <p>Pipeline: reads page logs from the Kafka topic {@code dwd_traffic_page_log},
 * filters rows that represent a keyword search, splits the full search phrase
 * into individual keywords with the {@code splitFunction} UDTF, counts each
 * keyword in 10-second event-time tumbling windows, and sinks the windowed
 * counts into the ClickHouse table
 * {@code dws_traffic_source_keyword_page_view_window}.
 */
public class DWSTrafficSourceKeywordPageViewWindowApp {

    public static void main(String[] args) throws Exception {

        // todo env
        StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        // Test value; in production, match the Kafka topic's partition count.
        streamExecutionEnvironment.setParallelism(4);
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(streamExecutionEnvironment);
        // Register the UDTF used below to split a search phrase into keywords.
        tableEnvironment.createTemporarySystemFunction("splitFunction", SplitFunction.class);

        // Checkpointing is disabled for local testing; re-enable in production:
        /*
        streamExecutionEnvironment.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE); // exactly-once
        // state backend
        streamExecutionEnvironment.setStateBackend(new HashMapStateBackend());
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointStorage(ClusterParametersConfig.HDFS_CHECKPOINT_FILE_DIR);  // store checkpoints on HDFS
        System.setProperty("HADOOP_USER_NAME", "zhu");
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);  // timeout
        streamExecutionEnvironment.getCheckpointConfig().setMaxConcurrentCheckpoints(2);  // max concurrent checkpoints
        */

        // todo DDL: read Kafka page_log, derive the event-time column rt from ts
        // and declare a 2-second out-of-orderness watermark on it.
        String topic = "dwd_traffic_page_log";
        String groupId = "dws_traffic_source_keyword_page_view_window";
        tableEnvironment.executeSql(" " +
                "create table page_log (" +
                "`page` map<string,string>, " +
                "`ts` bigInt, " +
                "`rt` AS TO_TIMESTAMP(FROM_UNIXTIME(ts/1000,'yyyy-MM-dd HH:mm:ss')), " +
                "  WATERMARK FOR rt AS rt - INTERVAL '2' SECOND ) "
                + ZhuKafkaUtil.getKafkaDDL(topic, groupId)
                );

        // todo Filter for keyword-search page views: the search phrase lives in
        // page['item'] when the previous page was 'search' and the item type is 'keyword'.
        Table searchTable = tableEnvironment.sqlQuery(
                "select " +
                        "page['item'] full_word, " +
                        "rt " +
                        "from page_log " +
                        "where page['item'] is not null " +
                        "and page['last_page_id'] ='search' " +
                        "and page['item_type'] = 'keyword'");
        tableEnvironment.createTemporaryView("search_table", searchTable);

        // todo Explode each full search phrase into individual keywords via the UDTF.
        // 't' is the lateral table alias; 'keyword' is its single output column.
        Table splitTable = tableEnvironment.sqlQuery(
                "select " +
                        "keyword, " +
                        "rt " +
                        "from search_table, " +
                        "lateral table(splitFunction(full_word)) as t(keyword)");
        tableEnvironment.createTemporaryView("split_table", splitTable);

        // todo Group by keyword, count per 10-second tumbling event-time window,
        // and stamp each row with the current processing time in milliseconds.
        Table keyWordBeanSearch = tableEnvironment.sqlQuery(
                "select " +
                        "date_format(tumble_start(rt, interval '10' second),'yyyy-MM-dd HH:mm:ss') stt," +   // window start
                        "date_format(tumble_end(rt, interval '10' second),'yyyy-MM-dd HH:mm:ss') edt, " +    // window end
                        "'search' source, " +
                        "keyword, " +
                        "count(*) keyword_count, " +
                        "UNIX_TIMESTAMP()*1000 ts " +
                        "from split_table " +
                        "group by keyword,tumble(rt,interval '10' second)");
        // Windowed aggregation emits only final per-window results, so an
        // append-only stream is sufficient here.
        DataStream<KeyWordBean> keyWordBeanDataStream = tableEnvironment.toAppendStream(keyWordBeanSearch, KeyWordBean.class);
        keyWordBeanDataStream.print(">>>>>");

        // todo Sink the windowed keyword counts to ClickHouse.
        keyWordBeanDataStream.addSink(ClickHouseUtil.getClickHouseSinkFunction("" +
                "insert into dws_traffic_source_keyword_page_view_window values(?,?,?,?,?,?)"));

        // todo execute (named job for easier identification in the Flink UI)
        streamExecutionEnvironment.execute("DWSTrafficSourceKeywordPageViewWindowApp");
    }
}
