package com.bw.gmall.realtime.app.dws;


import com.bw.gmall.realtime.app.func.LaterViewFuntion;
import com.bw.gmall.realtime.app.func.SplitFunction;
import com.bw.gmall.realtime.bean.KeywordBean;
import com.bw.gmall.realtime.utils.MyClickHouseUtil;
import com.bw.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/*
 * Count search keywords per traffic source over short time windows.
 * Example output row: window [0s, 10s), source "广告" (ad), keyword "苹果手机", count 100.
 *
 * A page-log record counts as a keyword search when:
 *   page['last_page_id'] = 'search'   -- the user came from the search page
 *   page['item_type']    = 'keyword'  -- the 'item' field holds the search phrase
 *   page['item']         is not null
 */
/**
 * DWS job: counts search keywords per source over 10-second tumbling windows.
 *
 * Pipeline: Kafka (dwd_traffic_page_log) -> filter keyword-search page views
 * -> split the search phrase into words via a lateral table function
 * -> tumbling-window count -> ClickHouse sink.
 */
public class DwsTrafficSourceKeywordPageViewWindow_test {

    public static void main(String[] args) throws Exception {

        // TODO 1. Set up the streaming environment and its Table API bridge.
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(streamEnv);

        // 2. Source: the DWD page-log topic. Derive event time `rt` from the
        // epoch-millis `ts` column and attach a 2-second watermark so the
        // tumbling window below can fire on event time.
        String pageLogDdl = "create  table  page_table(\n" +
                "`page`  Map<STRING,STRING>,\n" +
                "`ts`    BIGINT,\n" +
                " rt  as  TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)),\n" +
                " WATERMARK FOR rt AS rt - INTERVAL '2' SECOND\n" +
                ")with(\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'dwd_traffic_page_log',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "  'properties.group.id' = 'testGroupxx',\n" +
                "  'scan.startup.mode' = 'latest-offset',\n" +
                "  'format' = 'json'\n" +
                ")\n";
        tEnv.executeSql(pageLogDdl);

        // Keep only rows that represent a keyword search: the user arrived
        // from the search page and the item field carries the search phrase.
        Table searchKeywords = tEnv.sqlQuery("select  \n" +
                "\n" +
                "   page['item']  item,\n" +
                "   rt\n" +
                "\n" +
                "   from    page_table  " +
                "where    page['last_page_id']='search'  and  " +
                " page['item_type']='keyword'   and   page['item']  is not  null ");
        tEnv.createTemporaryView("filter_keyword", searchKeywords);

        // Register the user-defined table function that splits a search phrase
        // into single words, then explode each phrase with a lateral join;
        // every emitted `word` becomes one keyword row.
        tEnv.createTemporaryFunction("laterviewkeyword", LaterViewFuntion.class);
        Table explodedKeywords = tEnv.sqlQuery("select  'search' source,item, word keyword,rt   from  " +
                "filter_keyword, lateral table(laterviewkeyword(item))");
        tEnv.createTemporaryView("laterViewTable", explodedKeywords);

        // Aggregate: keyword counts per (source, keyword) over 10-second
        // tumbling event-time windows; `ts` records the emit time in millis.
        Table keywordCounts = tEnv.sqlQuery("select  window_start stt,window_end edt,source,keyword,count(*) keyword_count," +
                "UNIX_TIMESTAMP()*1000  ts  from  table(\n" +
                "tumble(TABLE laterViewTable,DESCRIPTOR(rt),INTERVAL '10' SECOND)\n" +
                ")   group by   window_start,window_end,source,keyword");

        // Sink: the windowed aggregate is insert-only, so an append stream is
        // safe; rows are mapped onto KeywordBean by field name and written to
        // ClickHouse. Placeholder order must follow the bean's field order.
        DataStream<KeywordBean> keywordStream = tEnv.toAppendStream(keywordCounts, KeywordBean.class);
        keywordStream.addSink(MyClickHouseUtil.getSinkFunction("insert into   gmall.dws_traffic_source_keyword_page_view_window  " +
                " values(?,?,?,?,?,?)"));

        streamEnv.execute();
    }

}
