package com.atguigu.app.dws;

import com.atguigu.bean.KeyWordBean;
import com.atguigu.func.MySplitFunction;
import com.atguigu.func.SplitFunction;
import com.atguigu.utils.KafkaUtil;
import com.atguigu.utils.MyClickhouseUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/*
DWS layer, job 10.1: traffic-domain, keyword-granularity page-view window summary table.

Main task:
    Read page-view detail records from the Kafka page-log topic, filter out search
    behavior, and tokenize the search text with a custom UDTF (one-row-in,
    multiple-rows-out) function. Count the occurrences of each search keyword per
    window and write the results to ClickHouse.

    The tokenization logic is implemented in code; it relies on the IK analyzer,
    which must be added as a dependency.
 */
//todo 1.创建环境
//todo 设置状态后端
//todo 2.使用flink sql方式读取页面日志主题数据创建动态表 注意提取事件时间和watermark
//todo 3.过滤出需要的搜索数据
//todo 4.注册UDTF并使用其进行分词处理
//todo 5.分组、开窗、聚合
//todo 6.将动态表转化为流
//todo 7.将数据写出到clickhouse
//todo 8.启动任务
/**
 * DWS: traffic-domain, keyword-granularity page-view window summary.
 *
 * <p>Pipeline: Kafka page-log topic → filter search events → tokenize the search
 * text with a custom UDTF (backed by the IK analyzer) → count keywords per
 * 10-second tumbling event-time window → sink to ClickHouse.
 */
public class Dws01TrafficKeywordPageViewWindow {

    public static void main(String[] args) throws Exception {
        // 1. Create the stream and table execution environments.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 for local testing; in production, match the Kafka partition count.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO: configure checkpointing and the state backend before production use.

        // 2. Read the page-log topic as a dynamic table via Flink SQL, deriving the
        //    event time (rt) from ts and declaring a 2-second out-of-orderness watermark.
        tableEnv.executeSql("" +
                "create table topic_page(" +
                "common map<string,string>," +
                "page map<string,string>," +
                "ts bigint," +
                // ts is in milliseconds, hence precision 3 for TO_TIMESTAMP_LTZ;
                // rt drives the event-time windowing below.
                "rt as to_timestamp_ltz(ts,3)," +
                "watermark for rt as rt-INTERVAL '2' SECOND" +
                ")" + KafkaUtil.getKafkaConnectorDDL("page_topic", "keyword220828"));

        // 3. Keep only keyword-search records: previous page was the search page,
        //    the item type is 'keyword', and the search text is present.
        Table searchTable = tableEnv.sqlQuery("select " +
                "page['item'] item," + // raw search text
                "rt " +                // event time, needed for windowing later
                "from topic_page " +
                "where page['last_page_id']='search' " +
                "and page['item_type']='keyword' " +
                "and page['item'] is not null ");
        tableEnv.createTemporaryView("search_table", searchTable);

        // 4. Register the UDTF and explode each search phrase into individual words
        //    via a lateral join (one input row → N word rows).
        tableEnv.createTemporaryFunction("myUDTFfunc", MySplitFunction.class);
        Table afterSplitTable = tableEnv.sqlQuery("select " +
                "item," + // original search text
                "word," + // single token produced by the UDTF
                "rt " +   // event time, carried through for windowing
                "from search_table " +
                "left join lateral table(myUDTFfunc(item)) on true");
        tableEnv.createTemporaryView("after_split_table", afterSplitTable);

        // 5. Group by word over a 10-second tumbling event-time window and count.
        //    NOTE: the column aliases must match the KeyWordBean field names exactly,
        //    otherwise the table-to-stream conversion below cannot map the rows.
        Table resultTable = tableEnv.sqlQuery("select " +
                "date_format(tumble_start(rt,interval '10' second),'yyyy-MM-dd HH:mm:ss') stt," +
                "date_format(tumble_end(rt,interval '10' second),'yyyy-MM-dd HH:mm:ss') edt," +
                "word keyword," +
                "count(*) keyword_count," +
                "UNIX_TIMESTAMP() ts " +
                "from after_split_table " +
                "group by " +
                "word," +
                "tumble(rt,interval '10' second)");

        // 6. Convert the append-only result table into a DataStream of KeyWordBean.
        //    A ClickHouse sink cannot be declared via DDL here, so the project's own
        //    JDBC SinkFunction is used on the DataStream instead.
        DataStream<KeyWordBean> keyWordBeanDS = tableEnv.toAppendStream(resultTable, KeyWordBean.class);
        keyWordBeanDS.print("即将写入clickhouse的数据:");

        // 7. Write the windowed keyword counts to ClickHouse.
        keyWordBeanDS.addSink(MyClickhouseUtil.getSinkFunction(
                "insert into dws_traffic_keyword_page_view_window values(?,?,?,?,?)"));

        // 8. Submit the job — required because the DataStream API is involved.
        env.execute("Dws01TrafficKeywordPageViewWindow");
    }
}
