package com.bw.gmall.realtime.app.dws;

import com.bw.gmall.realtime.app.func.SplitFunction;
import com.bw.gmall.realtime.bean.KeywordBean;
import com.bw.gmall.realtime.utils.MyClickHouseUtil;
import com.bw.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;

/**
 * DWS-layer job: traffic source keyword page-view window.
 *
 * <p>Pipeline: read page-log records from the Kafka DWD topic, keep only
 * keyword-search events, explode the search phrase into individual keywords
 * with a UDTF, count keywords over 10-second tumbling event-time windows,
 * and sink the aggregates to ClickHouse.
 */
public class DwsTrafficSourceKeywordPageViewWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1. Build the stream and table environments.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps this demo deterministic; in production it
        // should normally match the Kafka topic's partition count.
        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // TODO 2. Read the page_log Kafka topic via DDL, deriving the event
        // time `rt` from `ts` and generating a watermark with 2 s of
        // allowed out-of-orderness.
        String topic = "dwd_traffic_page_log";
        // If a consumer in the group dies, its partitions are reassigned to
        // the surviving members (consumer-side fault tolerance); the group
        // also spreads partitions across members for throughput.
        String groupId = "dws_traffic_source_keyword_page_view_window";
        // Page stream payload shape: {common, page, ts}
        tenv.executeSql("" +
                "create table page_log(" +
                "`page` map<string,string>," +
                "`ts` bigint," +
                "`rt` as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)), " +
                "WATERMARK FOR `rt` AS `rt` - INTERVAL '2' SECOND " +
                ")" + MyKafkaUtil.getKafkaDDL(topic, groupId));

        // TODO 3. Keep only keyword-search events.
        Table filterTable = tenv.sqlQuery("" +
                " select " +
                " page['item'] item," +
                " rt " +
                " from page_log " +
                "where page['last_page_id'] = 'search' " + // item sources: user search, promotion, recommendation, activity
                "and page['item_type']='keyword' " +
                "and page['item'] is not null");
        tenv.createTemporaryView("filter_table", filterTable);

        // Register the word-splitting UDTF (tokenizes the search phrase
        // into individual keywords).
        tenv.createTemporarySystemFunction("SplitFunction", SplitFunction.class);

        // TODO 4. Apply the UDTF: LATERAL TABLE explodes each search phrase
        // into one row per keyword.
        Table splitTable = tenv.sqlQuery("" +
                " select " +
                " word, " +
                " rt " +
                " FROM filter_table," +
                " LATERAL TABLE(SplitFunction(item))");
        tenv.createTemporaryView("split_table", splitTable);

        // NOTE(review): everything from here to the next TODO is debug-only.
        // It prints the exploded rows, keys by field index 1 (which is `rt`,
        // the timestamp — presumably `word` at index 0 was intended), and
        // the ProcessWindowFunction emits nothing downstream, only printing
        // watermark and window bounds. Consider removing before production.
        tenv.toAppendStream(splitTable, Row.class).print(">>>>>>>>>>>222222");
        DataStream<Row> ds = tenv.toAppendStream(splitTable, Row.class);
        ds.keyBy(new KeySelector<Row, String>() {
            @Override
            public String getKey(Row v) throws Exception {
                return v.getField(1) + "";
            }
        }).window(TumblingEventTimeWindows.of(Time.seconds(5L)))
                .process(new ProcessWindowFunction<Row, String, String, TimeWindow>() {

                    @Override
                    public void process(String s, ProcessWindowFunction<Row, String, String, TimeWindow>.Context context, Iterable<Row> iterable, Collector<String> collector) throws Exception {
                        System.out.println(context.currentWatermark() + "-----------------------");
                        System.out.println(context.window().getEnd());
                        System.out.println(context.window().getStart());
                    }
                });

        // TODO 5. Group, window, and aggregate: keyword counts per
        // 10-second tumbling event-time window (windowing TVF syntax).
        Table resultTable = tenv.sqlQuery("select " +
                "window_start stt," +
                "window_end edt," +
                "'search' source," +
                "word keyword," +
                "count(*) keyword_count," +
                "UNIX_TIMESTAMP()*1000 ts " +
                "FROM TABLE(\n " +
                "TUMBLE(TABLE split_table,DESCRIPTOR(rt),INTERVAL '10' second))\n " +
                "GROUP BY word,window_start,window_end");

        // TODO 6. Convert the dynamic table to an append-only stream of beans.
        DataStream<KeywordBean> keywordBeanDataStream = tenv.toAppendStream(resultTable, KeywordBean.class);

        // TODO 7. Write the aggregates to ClickHouse (print kept for debugging).
        keywordBeanDataStream.print(">>>>>>>>>>>>>>>>>>>>>>");

        keywordBeanDataStream.addSink(MyClickHouseUtil.getSinkFunction("insert into dws_traffic_source_keyword_page_view_window values(?,?,?,?,?,?)"));

        // TODO 8. Launch the job.
        env.execute("DwsTrafficSourceKeywordPageViewWindow");

    }
}
