package com.bw.gmall.app.dws;

import com.bw.gmall.app.fure.SplitFunction;
import com.bw.gmall.bean.KeywordBean;
import com.bw.gmall.utils.MyClickHouseUtil;
import com.bw.gmall.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS job: counts search-keyword page views per 10-second tumbling event-time window.
 *
 * <p>Pipeline: Kafka topic {@code dwd_traffic_page_log} → filter search events →
 * split the search phrase into words via the {@code SplitFunction} UDTF →
 * windowed count → ClickHouse table {@code dws_traffic_source_keyword_page_view_window}.
 */
public class DwsTrafficSourceKeywordPageViewWindow {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        String topic = "dwd_traffic_page_log";
        String groupId = "dws_traffic_source_keyword_page_view_window";

        // Source table over the Kafka page-log topic.
        // rt is an event-time attribute derived from the epoch-millis ts field,
        // with a 2-second bounded-out-of-orderness watermark.
        tableEnv.executeSql("create table page_log( " +
                "    `page` map<string,string>, " +
                "    `ts` bigint, " +
                "    `rt` as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)), " +
                "    WATERMARK FOR rt AS rt - INTERVAL '2' SECOND " +
                " ) " + MyKafkaUtil.getKafkaDDL(topic, groupId));

        // Keep only keyword-search page views; item holds the raw search phrase.
        Table filterTable = tableEnv.sqlQuery("" +
                " select " +
                "    page['item'] item, " +
                "    rt " +
                " from page_log " +
                " where page['last_page_id'] = 'search' " +
                " and page['item_type'] = 'keyword' " +
                " and page['item'] is not null");
        tableEnv.createTemporaryView("filter_table", filterTable);

        // UDTF that tokenizes the search phrase into individual words.
        tableEnv.createTemporarySystemFunction("SplitFunction", SplitFunction.class);

        // Explode each search phrase into one row per word, preserving event time.
        Table splitTable = tableEnv.sqlQuery("select " +
                "word, " +
                "rt " +
                "from filter_table, " +
                "LATERAL TABLE(SplitFunction(item))");
        tableEnv.createTemporaryView("split_table", splitTable);

        // Count occurrences of each word per 10-second tumbling window.
        // ts is the wall-clock write time (millis), used for ClickHouse versioning.
        Table resultTable = tableEnv.sqlQuery("SELECT " +
                "window_start  stt, " +
                "window_end  edt," +
                "'search' source,"+
                " word keyword," +
                "count(*) keyword_count," +
                "UNIX_TIMESTAMP()*1000 ts " +
                "  FROM TABLE(" +
                "    TUMBLE(TABLE split_table, DESCRIPTOR(rt), INTERVAL '10' second))" +
                "  GROUP BY word,window_start, window_end");

        // Bridge back to the DataStream API and sink into ClickHouse.
        DataStream<KeywordBean> keywordBeanDataStream = tableEnv.toAppendStream(resultTable, KeywordBean.class);
        keywordBeanDataStream
                .addSink(MyClickHouseUtil
                        .getSinkFunction("insert into dws_traffic_source_keyword_page_view_window  values(?,?,?,?,?,?)"));

        // Required: without execute() the DataStream part of the job graph is
        // never submitted, so the ClickHouse sink would never run.
        env.execute("DwsTrafficSourceKeywordPageViewWindow");
    }
}
