package com.lhd.app.dws;


import com.lhd.app.func.SplitFunction;
import com.lhd.bean.KeywordBean;
import com.lhd.common.utils.MyClickHouseUtil;
import com.lhd.common.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/*
 Pipeline overview:
 1. Read traffic page-log data (Kafka topic dwd_traffic_page_log; keyword source is the DWD page log)
 2. Filter out keyword-search records
 3. Split the search phrase into individual words (explode with a UDTF)
 4. Windowed aggregation of keyword counts
 5. Convert the result table back to a stream and write it to ClickHouse
*/
/**
 * DWS-layer Flink job: counts search keywords per tumbling event-time window
 * and writes the aggregates to ClickHouse.
 *
 * <p>Flow: Kafka (dwd_traffic_page_log) -> filter keyword searches -> split
 * phrase into words via UDTF -> 10s tumbling-window count -> ClickHouse sink
 * (table dws_traffic_source_keyword_page_view_window).
 */
public class DwsTrafficSourceKeywordPageViewWindow {

    public static void main(String[] args) throws Exception {

        //TODO 1. Set up the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism for development; in production this would normally
        // match the Kafka topic's partition count.
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        //TODO 2. Create a table over the Kafka page-log topic via DDL, deriving
        //        the event-time column `rt` from `ts` and attaching a 2s watermark.
        String topic = "dwd_traffic_page_log";
        // Consumer group: members share the topic's partitions to spread load;
        // when a consumer dies its partitions are rebalanced to the survivors,
        // giving fault-tolerant consumption.
        String groupId = "dws_traffic_source_keyword_page_view_window";

        // Page-log stream shape: { common, page, ts }
        tableEnv.executeSql("" +
                "create table page_log( " +
                "    `page` map<string,string>, " +
                "    `ts` bigint, " +
                "    `rt` as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)), " +
                "    WATERMARK FOR rt AS rt - INTERVAL '2' SECOND " +
                " ) " + MyKafkaUtil.getKafkaDDL(topic, groupId));

        //TODO 3. Keep only keyword-search page views
        Table filterTable = tableEnv.sqlQuery("" +
                " select " +
                "    page['item'] item, " +
                "    rt " +
                " from page_log " +
                " where page['last_page_id'] = 'search' " +  // other traffic sources: promotion, recommendation, activity
                " and page['item_type'] = 'keyword' " +
                " and page['item'] is not null");
        tableEnv.createTemporaryView("filter_table", filterTable);

        //TODO 4. Register the UDTF and split each search phrase into words.
        // The table function emits one row per word; LATERAL TABLE joins each
        // emitted word back to its source row ("explodes" the phrase).
        tableEnv.createTemporarySystemFunction("SplitFunction", SplitFunction.class);
        Table splitTable = tableEnv.sqlQuery("" +
                "SELECT " +
                "    word, " +
                "    rt " +
                "FROM filter_table,  " +
                "LATERAL TABLE(SplitFunction(item))");
        tableEnv.createTemporaryView("split_table", splitTable);

        //TODO 5. Group, window, and aggregate: count each keyword per
        //        10-second tumbling event-time window (windowing TVF syntax).
        Table resultTable = tableEnv.sqlQuery("SELECT " +
                "window_start  stt, " +
                "window_end  edt," +
                "'search' source,"+
                " word keyword," +
                "count(*) keyword_count," +
                "UNIX_TIMESTAMP()*1000 ts " +  // processing-time stamp for the sink row
                "  FROM TABLE(\n" +
                "    TUMBLE(TABLE split_table, DESCRIPTOR(rt), INTERVAL '10' second))\n" +
                "  GROUP BY word,window_start, window_end");

        //TODO 6. Convert the dynamic table to an append-only stream
        DataStream<KeywordBean> keywordBeanDataStream = tableEnv.toAppendStream(resultTable, KeywordBean.class);

        //TODO 7. Write the aggregates out to ClickHouse (stdout print kept for debugging)
        keywordBeanDataStream.print(">>>>>>>>>>>>>>>>>>>");
        keywordBeanDataStream
                .addSink(MyClickHouseUtil.getSinkFunction("insert into dws_traffic_source_keyword_page_view_window  values(?,?,?,?,?,?)"));

        //TODO 8. Submit the job
        env.execute("DwsTrafficSourceKeywordPageViewWindow");
    }

}
