package com.bw.day0630;

import com.bw.gmall.realtime.app.fun.SplitFunction;
import com.bw.gmall.realtime.bean.KeywordBean;
import com.bw.gmall.realtime.utils.MyClickHouseUtil;
import com.bw.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

public class DwsTrafficSourceKeywordPageViewWindow {
    /**
     * Entry point. Reads page-view logs from the Kafka topic {@code dwd_traffic_page_log},
     * filters down to search-keyword events, explodes each search phrase into individual
     * words via the {@code SplitFunction} UDTF, counts occurrences per keyword over
     * 10-second tumbling event-time windows, and sinks the aggregates to ClickHouse.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        // TODO 1. Set up the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // TODO 2. Create the Kafka source table via DDL: derive an event-time column `rt`
        // from the millisecond epoch `ts` and declare a 2-second bounded watermark on it.
        String topic = "dwd_traffic_page_log";
        String groupId = "dws_traffic_source_keyword_page_view_window";

        tenv.executeSql("create table page_log (" +
                "page map<string,string>, " +
                "ts bigint, " +
                "rt as to_timestamp(from_unixtime(ts/1000)), " +
                "watermark for rt as rt - interval '2' second " +
                ")" + MyKafkaUtil.getKafkaDDL(topic, groupId));

        // TODO 3. Keep only search events: the previous page was the search page,
        // the item type is a keyword, and the keyword itself is non-null.
        // NOTE: each fragment ends with a space — without it the concatenated SQL
        // fused into `...'keyword'and page['item']...`.
        Table filterTable = tenv.sqlQuery("select " +
                "page['item'] item, " +
                "rt " +
                "from page_log " +
                "where page['last_page_id'] = 'search' " +
                "and page['item_type'] = 'keyword' " +
                "and page['item'] is not null ");
        tenv.createTemporaryView("filter_table", filterTable);

        // TODO 4. Register the word-splitting UDTF and explode each search phrase
        // into individual keywords with LATERAL TABLE.
        tenv.createTemporaryFunction("SplitFunction", SplitFunction.class);
        Table splitTable = tenv.sqlQuery("select " +
                "word, " +
                "rt " +
                "from filter_table, " +
                "lateral table(SplitFunction(item))");
        tenv.createTemporaryView("split_table", splitTable);

        // TODO 5. Group by keyword over 10-second tumbling event-time windows and count.
        // `ts` is the emission timestamp (wall clock) in milliseconds.
        Table resTable = tenv.sqlQuery("select " +
                "window_start stt, " +
                "window_end edt, " +
                "'search' source, " +
                "word keyword, " +
                "count(*) keyword_count, " +
                "unix_timestamp() * 1000 ts " +
                "from TABLE(" +
                "    TUMBLE(TABLE split_table, DESCRIPTOR(rt), INTERVAL '10' second)) " +
                "GROUP BY word,window_start, window_end ");

        // TODO 6. Convert the append-only dynamic table back to a DataStream of beans.
        DataStream<KeywordBean> keywordBeanDataStream = tenv.toAppendStream(resTable, KeywordBean.class);

        // TODO 7. Print for debugging, then write the aggregates to ClickHouse.
        keywordBeanDataStream.print(">>>>>>>>>>>>>>>>>>>>>");
        keywordBeanDataStream.addSink(MyClickHouseUtil.getSinkFunction(
                "insert into dws_traffic_source_keyword_page_view_window values(?,?,?,?,?,?)"));

        env.execute("DwsTrafficSourceKeywordPageViewWindow");
    }
}