package 实训二;

import com.bw.utils.MyClickHouseUtil;
import com.bw.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS-layer Flink job: counts search keywords per 10-second tumbling event-time window
 * and writes the aggregates to ClickHouse.
 *
 * <p>Pipeline: Kafka topic {@code dwd_traffic_page_log} → filter keyword-search page views
 * → split the search phrase into words via the {@code UDTFTest} table function
 * → tumbling-window count per word → ClickHouse sink.
 *
 * <p>NOTE(review): the Kafka consumer group id is the placeholder {@code "xxxx"} and the
 * semantics of {@code MyKafkaUtil} / {@code MyClickHouseUtil} / {@code UDTFTest} are not
 * visible from this file — confirm against their definitions before relying on this doc.
 */
public class DwsTrafficSourceKeywordPageViewWindow {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Table API entry point over the streaming environment.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Kafka source table: `rt` is an event-time column derived from the epoch-millis
        // `ts`, with a 2-second out-of-orderness watermark. The connector options come
        // from MyKafkaUtil.getKafkaDDL (project helper; second arg is the group id).
        final String pageLogDdl =
                "create table page_log( "
                        + "    `page` map<string,string>, "
                        + "    `ts` bigint, "
                        + "    `rt` as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)), "
                        + "    WATERMARK FOR rt AS rt - INTERVAL '2' SECOND "
                        + " ) "
                        + MyKafkaUtil.getKafkaDDL("dwd_traffic_page_log", "xxxx");
        tableEnv.executeSql(pageLogDdl);

        // Keep only keyword searches that arrived from the search page and carry a phrase.
        final String keywordFilterSql =
                "select "
                        + "`page`['item'] item,rt "
                        + " from page_log "
                        + " where `page`['item_type']='keyword' "
                        + " and `page`['last_page_id']='search' "
                        + " and `page`['item'] is not null ";
        Table keywordTable = tableEnv.sqlQuery(keywordFilterSql);
        tableEnv.createTemporaryView("keyword_page_view", keywordTable);

        // Register the word-splitting UDTF (project class; presumably emits a `word`
        // column per token of `item` — confirm against UDTFTest).
        tableEnv.createTemporarySystemFunction("keywordFunction", UDTFTest.class);

        // Explode each search phrase into individual words via a lateral join.
        final String splitWordSql =
                "select item,word,rt"
                        + " from keyword_page_view"
                        + ", LATERAL TABLE(keywordFunction(item))";
        Table splitWordTable = tableEnv.sqlQuery(splitWordSql);
        tableEnv.createTemporaryView("laterTable", splitWordTable);

        // Per-word counts over 10-second tumbling windows (windowing TVF syntax);
        // `ts` records the emission time in epoch millis.
        final String windowAggSql =
                "SELECT window_start, "
                        + " window_end, "
                        + " 'search' AS source,"
                        + " word,"
                        + " count(*) count_size,"
                        + "UNIX_TIMESTAMP()*1000 ts\n"
                        + "  FROM TABLE(\n"
                        + "    TUMBLE(TABLE laterTable, DESCRIPTOR(rt), INTERVAL '10' SECOND))\n"
                        + "  GROUP BY window_start, window_end,word";
        Table windowAggTable = tableEnv.sqlQuery(windowAggSql);
        tableEnv.createTemporaryView("windowTable", windowAggTable);

        // Window aggregates are append-only, so an append stream is sufficient; the
        // six placeholders must match the KeyWordBean field order expected by the sink.
        final String insertSql =
                "insert into "
                        + " gmall.dws_traffic_source_keyword_page_view_window3 values(?,?,?,?,?,?)";
        DataStream<KeyWordBean> resultStream = tableEnv.toAppendStream(windowAggTable, KeyWordBean.class);
        resultStream.addSink(MyClickHouseUtil.getSinkFunction(insertSql));

        env.execute();
    }
}
