package com.atguigu.edu.realtime220815.app.dws;

import com.atguigu.edu.realtime220815.app.func.KeywordSplitUDTF;
import com.atguigu.edu.realtime220815.bean.KeywordBean;
import com.atguigu.edu.realtime220815.util.ClickHouseUtils;
import com.atguigu.edu.realtime220815.util.KafkaUtils;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Classname DwsTrafficSourceKeywordPageViewWindow
 * @Description DWS job: reads page-view logs from the DWD Kafka topic, filters search
 *              events, splits search phrases into keywords with a UDTF, aggregates
 *              keyword counts over 10-second tumbling windows, and writes the result
 *              to ClickHouse.
 * @Date 2023/2/16 11:26
 * @Created by lzx
 */
public class DwsTrafficSourceKeywordPageViewWindow {
    public static void main(String[] args) throws Exception {
        /*
        Overall flow:
        1. Create the stream and table execution environments, set parallelism,
           and register the keyword-splitting UDTF.
        2. Checkpoint configuration (currently disabled for local development).
        3. Read page-view data from the DWD page-log Kafka topic.
        4. Filter out search behavior.
        5. Split the full search phrase into keywords and keep the original columns.
        6. Group by keyword and source, aggregate over a tumbling window.
        7. Convert the dynamic table into a DataStream.
        8. Write the stream into ClickHouse.
         */

        // 1. Environments, parallelism, UDTF registration.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        env.setParallelism(4);
        // Fixed typo: the function was previously registered (and invoked) as
        // "spilt_keyword"; both the registration here and the single use in the
        // LATERAL TABLE query below were renamed together, so behavior is unchanged.
        tableEnv.createTemporaryFunction("split_keyword", KeywordSplitUDTF.class);

        // 2. Checkpointing — intentionally left disabled; re-enable for production.
        /*env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.setStateBackend(new HashMapStateBackend());
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(3)));
        env.getCheckpointConfig().setCheckpointTimeout(60000);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000);
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop101:8020/eduRealTime/ck");
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        System.setProperty("HADOOP_USER_NAME","atguigu");*/

        // 3. Source table over the DWD page-log Kafka topic. The event time is
        //    derived from the epoch-millis `ts` field, with a 2-second
        //    bounded-out-of-orderness watermark.
        tableEnv.executeSql("create table page_log (\n" +
                "            common map<String,String> ,\n" +
                "            page map<String,String>,\n" +
                "            ts bigint ,\n" +
                "            order_time as to_timestamp(from_unixtime(ts/1000)),\n" +
                "            WATERMARK FOR order_time AS order_time - INTERVAL '2' SECOND\n" +
                "        ) "+ KafkaUtils.getKafkaSqlWith("dwd_traffic_page_log","dws_traffic_keyword_group"));
        //tableEnv.executeSql("select * from page_log").print();

        // 4. Keep only search events: `item` holds the full search phrase when
        //    `item_type` is 'keyword'; `last_page_id` is carried along as the source.
        // NOTE(review): the original plan also filtered on
        //    page['last_page_id'] = 'search', but the query below does not —
        //    confirm whether every keyword event should count regardless of the
        //    page it came from.
        Table fullwordTable = tableEnv.sqlQuery("select\n" +
                "            page['item'] fullword,\n" +
                "            page['last_page_id'] source,\n" +
                "            order_time\n" +
                "        from page_log\n" +
                "        where page['item'] is not null and page['item_type'] = 'keyword' ");
        tableEnv.createTemporaryView("fullword_table",fullwordTable);
        //tableEnv.executeSql("select * from fullword_table").print();

        // 5. Explode each full search phrase into individual keywords via the UDTF
        //    (cross join with the lateral table), keeping source and event time.
        Table wordTable = tableEnv.sqlQuery("SELECT word,source,order_time\n" +
                "        FROM fullword_table,\n" +
                "        LATERAL TABLE(split_keyword(fullword)) t(word)");
        tableEnv.createTemporaryView("word_table",wordTable);
        //tableEnv.executeSql("select * from word_table").print();

        // 6. Count keyword occurrences per (keyword, source) over 10-second
        //    tumbling event-time windows. Window bounds are formatted as strings
        //    to match the ClickHouse column types; `ts` is the processing-time
        //    emission timestamp in millis (used for versioning downstream).
        Table sqlQuery = tableEnv.sqlQuery("select\n" +
                "            date_format(TUMBLE_START(order_time, INTERVAL '10' second) , 'yyyy-MM-dd HH:mm:ss') stt,\n" +
                "            date_format(TUMBLE_END(order_time, INTERVAL '10' second), 'yyyy-MM-dd HH:mm:ss') edt,\n" +
                "            word keyword,\n" +
                "            source ,\n" +
                "            count(*) keyword_count,\n" +
                "            UNIX_TIMESTAMP()*1000 ts\n" +
                "        from word_table\n" +
                "        group by word,source,TUMBLE(order_time, INTERVAL '10' second)");

        // 7. A windowed aggregation yields an append-only table, so it can be
        //    converted with toDataStream (no retractions to handle).
        DataStream<KeywordBean> dataStream = tableEnv.toDataStream(sqlQuery, KeywordBean.class);
        //dataStream.print();

        // 8. Sink into ClickHouse; the 6 placeholders map positionally to the
        //    KeywordBean fields (stt, edt, keyword, source, keyword_count, ts).
        dataStream.addSink(ClickHouseUtils.getClickHouseSinkFunction("insert into dws_traffic_source_keyword_page_view_window  values (?,?,?,?,?,?)"));

        // Name the job so it is identifiable in the Flink web UI / logs.
        env.execute("DwsTrafficSourceKeywordPageViewWindow");
    }
}
