package com.atguigu.bigdata.gmall.realtime.app.dws;

import com.atguigu.bigdata.gmall.realtime.app.BaseSQLApp;
import com.atguigu.bigdata.gmall.realtime.bean.TrafficKeywordCount;
import com.atguigu.bigdata.gmall.realtime.common.Constant;
import com.atguigu.bigdata.gmall.realtime.function.KWSplit;
import com.atguigu.bigdata.gmall.realtime.util.FlinkSinkUtil;
import com.atguigu.bigdata.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS job: counts search keywords over 5-second tumbling event-time windows.
 *
 * <p>Pipeline: Kafka DWD page log -> filter keyword-search records -> split the
 * search phrase into words with the {@link KWSplit} UDTF -> windowed count per
 * word -> ClickHouse table {@code dws_traffic_keyword_count}.
 */
public class Dws_01_DwsTrafficKeywordCount extends BaseSQLApp {

    /** Job name; also reused as the Kafka consumer-group id so the two can never drift apart. */
    private static final String APP_NAME = "Dws_01_DwsTrafficKeywordCount";

    public static void main(String[] args) {
        new Dws_01_DwsTrafficKeywordCount().init(
                3004,      // web UI / REST port for this job
                2,         // default parallelism
                APP_NAME
        );
    }

    @Override
    protected void handle(StreamExecutionEnvironment env,
                          StreamTableEnvironment tEnv) {
        // 1. Dynamic table over the DWD page-log topic. `et` is the event-time
        //    attribute derived from ts (precision 3 = ts is in milliseconds;
        //    0 would mean seconds); the watermark allows 3 seconds of
        //    out-of-orderness.
        tEnv.executeSql(" create table dwd_traffic_page( " +
                            " page map<string,string>, " +
                            " ts bigint, " +
                            " et as to_timestamp_ltz(ts,3), " +
                            " watermark for et as et - interval '3' second " +
                            " )" + SQLUtil.getKafkaSource(Constant.TOPIC_DWD_TRAFFIC_PAGE, APP_NAME)
        );

        // 2. Keep only keyword-search page records that actually carry a
        //    search phrase (page['item']).
        Table kwTable = tEnv.sqlQuery(" select " +
                " page['item'] kw," +
                " et" +
                " from dwd_traffic_page" +
                " where (page['item_type']='keyword')" +
                " and page['item'] is not null "
        );
        tEnv.createTemporaryView("kw_table", kwTable);

        // 3. Tokenize each search phrase into individual keywords with the
        //    kw_split UDTF; `join lateral table(...)` has inner-join semantics,
        //    so phrases producing no tokens are dropped.
        tEnv.createTemporaryFunction("kw_split", KWSplit.class);
        Table keywordTable = tEnv.sqlQuery(" select" +
                " keyword," +
                " et" +
                " from kw_table " +
                " join lateral table(kw_split(kw)) on true"
        );
        tEnv.createTemporaryView("keyword_table", keywordTable);

        // 4. Count occurrences of each keyword per 5-second tumbling
        //    event-time window; ts is the (processing-time) emission timestamp
        //    in milliseconds.
        Table result = tEnv.sqlQuery(" select" +
                " date_format(window_start,'yyyy-MM-dd HH:mm:ss') stt," +
                " date_format(window_end,'yyyy-MM-dd HH:mm:ss') edt," +
                " keyword," +
                " count(keyword) keyword_count," +
                " unix_timestamp() * 1000 ts" +
                " from  table( tumble( table keyword_table, descriptor(et), interval '5' second ) )" +
                " group by keyword,window_start, window_end"
        );

        // 5. Convert the table to a retract stream, keep only the insert (+)
        //    records, and sink the POJOs to ClickHouse.
        tEnv
                .toRetractStream(result, TrafficKeywordCount.class)
                .filter(t -> t.f0)
                .map(t -> t.f1)
                .addSink(FlinkSinkUtil.getClickHouseSink("dws_traffic_keyword_count", TrafficKeywordCount.class));

        try {
            env.execute();
        } catch (Exception e) {
            // Surface the job name so failures are attributable in shared logs;
            // the original exception is preserved as the cause.
            throw new RuntimeException("Flink job failed: " + APP_NAME, e);
        }
    }
}
