package com.atguigu.app.dws;

import com.atguigu.app.func.UDTFKeywordsFunc;
import com.atguigu.bean.KeywordBean;
import com.atguigu.util.ClickHouseUtil;
import com.atguigu.util.KafkaUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS job: traffic source keyword page-view window.
 *
 * <p>Reads page-log records from the Kafka topic {@code dwd_traffic_page_log},
 * filters rows whose {@code page['item_type']} is {@code 'keyword'}, counts each
 * keyword in 10-second event-time tumbling windows and writes the aggregated
 * rows into the ClickHouse table
 * {@code dws_traffic_source_keyword_page_view_window}.
 */
public class DwsTrafficSourceKeywordPageViewWindow {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);

        // Force early firing every 10 seconds so results are emitted even if
        // watermarks advance slowly and the window has not closed yet.
        TableConfig config = tableEnvironment.getConfig();
        config.getConfiguration().setBoolean("table.exec.emit.early-fire.enabled", true);
        config.getConfiguration().setString("table.exec.emit.early-fire.delay", "10s");

        String pageTopic = "dwd_traffic_page_log";
        String groupId = "dws_traffic_source_keyword_page_view_window";

        // Source table over the Kafka page-log topic; event time comes from the
        // `ts` field with a 2-second out-of-orderness watermark.
        tableEnvironment.executeSql("create table page_log(" +
                "`common` map<STRING,STRING>," +
                "`page` map<STRING,STRING>," +
                "`ts` bigint," +
                "`rt` as TO_TIMESTAMP_LTZ(ts,3)," +
                " WATERMARK FOR rt AS rt - INTERVAL '2' SECOND)" + KafkaUtil.getKafkaDDL(pageTopic, groupId));

        // Keep only keyword-search page events; expose the raw keyword and the
        // row time for windowing.
        Table keywordsTable = tableEnvironment.sqlQuery("select \n" +
                "`page`['item'] keyword,\n" +
                "rt\n" +
                "from page_log " +
                "where `page`['item_type']='keyword'");
        tableEnvironment.createTemporaryView("keyword_table", keywordsTable);

        // Per-keyword count in 10-second tumbling windows.
        // NOTE(review): the '123' err column looks like a placeholder — confirm it
        // matches the corresponding KeywordBean field and the ClickHouse schema.
        Table countsTable = tableEnvironment.sqlQuery("select \n" +
                "  DATE_FORMAT(TUMBLE_START(rt, INTERVAL '10' SECOND) , 'yyyy-MM-dd HH:mm:ss') AS stt,\n" +
                "  DATE_FORMAT(TUMBLE_END(rt, INTERVAL '10' SECOND) , 'yyyy-MM-dd HH:mm:ss') AS edt,\n" +
                "  'search' source,\n" +
                "  '123' err,\n" +
                "  keyword, \n" +
                "  count(*) keyword_count, \n" +
                "  UNIX_TIMESTAMP()*1000 ts \n" +
                "from keyword_table\n" +
                "group by \n" +
                "TUMBLE(rt, INTERVAL '10' SECOND),\n" +
                "keyword");

        // Early firing produces retract messages (f0 == false, the withdrawn old
        // row). The ClickHouse sink is insert-only, so forward only the
        // accumulate (f0 == true) messages to avoid re-inserting retracted rows.
        DataStream<Tuple2<Boolean, KeywordBean>> keywordBeanDataStream =
                tableEnvironment.toRetractStream(countsTable, KeywordBean.class);
        SingleOutputStreamOperator<KeywordBean> mapStream = keywordBeanDataStream
                .filter(record -> record.f0)
                .map(new MapFunction<Tuple2<Boolean, KeywordBean>, KeywordBean>() {
                    @Override
                    public KeywordBean map(Tuple2<Boolean, KeywordBean> value) throws Exception {
                        return value.f1;
                    }
                });

        mapStream.addSink(ClickHouseUtil.getClickHouseSink(
                "insert into dws_traffic_source_keyword_page_view_window values (?,?,?,?,?,?)"));

        env.execute();
    }
}
