package com.atguigu.realtime.app.dws;

import com.atguigu.realtime.common.GmallConfig;
import com.atguigu.realtime.func.KeywordUDTF;
import com.atguigu.realtime.utils.MyKafkaUtil;
import jdk.nashorn.internal.ir.EmptyNode;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS-layer Flink job: counts search keywords per 10-second tumbling event-time
 * window and writes the aggregates to a Doris table.
 *
 * <p>Required upstream components: zk, kafka, flume, doris, DwdTrafficBaseLogSplit.
 *
 * <p>Pipeline:
 * <ol>
 *   <li>Read the DWD page-log Kafka topic into a dynamic table
 *       (event time derived from {@code ts}, watermark on {@code rowtime}).</li>
 *   <li>Filter rows that represent a keyword search.</li>
 *   <li>Split the full search phrase into keywords with the custom UDTF
 *       {@code ik_analyze} ({@link KeywordUDTF}).</li>
 *   <li>Tumble-window (10s) count per keyword and insert into Doris.</li>
 * </ol>
 *
 * @author 洛尘
 * @since 2023-10-11
 */
public class DwsTrafficSourceKeywordPageViewWindow {
    public static void main(String[] args) {
        // 1. Basic environment setup.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 1.4 Register the custom word-splitting table function.
        tableEnv.createTemporarySystemFunction("ik_analyze", KeywordUDTF.class);

        // 2. Checkpointing.
        // NOTE(review): the job produces no output without checkpointing enabled —
        // presumably the Doris sink only flushes/commits on checkpoint; confirm
        // against the doris-flink-connector docs.
        // Production tuning (checkpoint timeout/retention, restart strategy,
        // HashMapStateBackend + HDFS checkpoint storage, HADOOP_USER_NAME) is
        // intentionally omitted in this exercise version.
        env.enableCheckpointing(5000L);

        // 3. Source table over the Kafka page-log topic, declaring the event-time
        // attribute and watermark.
        // NOTE(review): a zero-delay watermark (AS rowtime) drops any out-of-order
        // events; consider "rowtime - INTERVAL 'n' SECOND" if late data matters.
        tableEnv.executeSql("CREATE TABLE page_log (\n" +
                "    common map<string,string>,\n" +
                "    page map<string,string>,\n" +
                "    ts BIGINT,\n" +
                "    rowtime as TO_TIMESTAMP_LTZ(ts,3), \n" +
                "    WATERMARK FOR rowtime AS rowtime\n" +
                ")" + MyKafkaUtil.getKafkaDDL("dwd_traffic_page_log", "dws_traffic_keyword_group"));
        // tableEnv.executeSql("select * from page_log").print();

        // 4. Keep only search behavior: the user came from the search page and the
        // clicked item is a non-null keyword.
        Table searchTable = tableEnv.sqlQuery("select\n" +
                "page ['item'] fullword,\n" +
                "rowtime\n" +
                "from page_log where page['last_page_id']='search'\n" +
                "and page['item_type']='keyword' and page['item'] is not null");
        tableEnv.createTemporaryView("searchTable", searchTable);
        // tableEnv.executeSql("select * from searchTable").print();

        // 5. Split each search phrase into keywords with the UDTF and keep the
        // original event time via a lateral join.
        Table joinTable = tableEnv.sqlQuery("SELECT " +
                "keyword, " +
                "rowtime\n" +
                "FROM searchTable,\n" +
                "LATERAL TABLE(ik_analyze(fullword)) t(keyword)");
        tableEnv.createTemporaryView("joinTable", joinTable);
        // tableEnv.executeSql("select * from joinTable").print();

        // 6. Tumbling-window aggregation (10s windows, count per keyword).
        // FIX: the original also grouped by rowtime, which put every distinct event
        // timestamp into its own group and broke the per-window counts (they were
        // almost always 1). cur_date is now derived from window_start so rowtime
        // can be dropped from GROUP BY. The trailing ';' was also removed — Flink's
        // SQL parser rejects it inside a single-statement sqlQuery() string.
        Table resultTable = tableEnv.sqlQuery("SELECT " +
                "date_format(window_start,'yyyy-MM-dd HH:mm:ss') stt, " +
                "date_format(window_end,'yyyy-MM-dd HH:mm:ss') edt, " +
                "keyword," +
                "date_format(window_start,'yyyy-MM-dd') cur_date," +
                "count(*) keyword_count" +
                "  FROM TABLE(\n" +
                "    TUMBLE(TABLE joinTable, DESCRIPTOR(rowtime), INTERVAL '10' second))\n" +
                "  GROUP BY window_start, window_end, keyword");
        tableEnv.createTemporaryView("resultTable", resultTable);
        // tableEnv.executeSql("select * from resultTable").print();

        // 7. Doris sink table. Column order must match resultTable's SELECT list.
        // (Connector option keys follow the doris-flink-connector format; extra
        // values such as GmallConfig.DORIS_FE are spliced in via concatenation.)
        tableEnv.executeSql("create table doris_t(\n" +
                "stt string,\n" +
                "edt string,\n" +
                "keyword string,\n" +
                "cur_date string,\n" +
                "keyword_count bigint\n" +
                ") WITH (\n" +
                "      'connector' = 'doris',\n" +
                "      'fenodes' = '" + GmallConfig.DORIS_FE + "',\n" +
                "      'table.identifier' = '" + GmallConfig.DORIS_DB + ".dws_traffic_source_keyword_page_view_window',\n" +
                "      'username' = 'root',\n" +
                "      'password' = 'aaaaaa',\n" +
                "      'sink.enable-2pc' = 'false'\n" +
                ")");

        // 8. Write the windowed aggregates to Doris.
        tableEnv.executeSql("insert into doris_t select * from resultTable");
    }
}