package com.bw.yk02.app;

import com.bw.yk02.bean.SearchKeyWord;
import com.bw.yk02.fnc.MyIKanalyzerFunction;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import java.sql.PreparedStatement;
import java.sql.SQLException;

/**
 * Flink streaming job: reads page-view logs from Kafka, extracts search keywords,
 * splits them with an IK-analyzer UDF, aggregates counts per 10-second tumbling
 * window, and writes the stats to ClickHouse via the JDBC sink.
 */
public class FlinkDemo04 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // Table UDF that splits a search phrase into individual keywords
        // (presumably emits a column named `keyword` — confirmed by usage below).
        tableEnv.createTemporaryFunction("split_word", MyIKanalyzerFunction.class);

        // Kafka source table. Fixes vs. original:
        //  - computed column requires the AS keyword;
        //  - the WATERMARK declaration had a stray backtick that broke the DDL.
        tableEnv.executeSql("CREATE TABLE page_log (\n" +
                "  `common` MAP<STRING,STRING>,\n" +
                "  `page` MAP<STRING,STRING>,\n" +
                "  `ts` BIGINT,\n" +
                "  `rowtime` AS TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)),\n" +
                "  WATERMARK FOR rowtime AS rowtime \n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'dwd_page_log',\n" +
                "  'properties.bootstrap.servers' = '192.168.18.105:9092',\n" +
                "  'properties.group.id' = 'flinkdemo2_group',\n" +
                "  'scan.startup.mode' = 'earliest-offset',\n" +
                "  'format' = 'json'\n" +
                ")");

        // Keep only keyword-search page views.
        Table result = tableEnv.sqlQuery("SELECT page['item'] words," +
                " ts," +
                " rowtime" +
                " FROM page_log " +
                " where page['item'] is not null " +
                " and page['last_page_id']='search'" +
                " and page['item_type']='keyword'"
        );
        tableEnv.createTemporaryView("view_page_log_search_keywords", result);

        // Explode each search phrase into one row per keyword.
        // Original selected `keyword11111`, which is not a column the UDF emits;
        // the window query below references `keyword`.
        Table user_page_log = tableEnv.sqlQuery("select rowtime,keyword " +
                "from view_page_log_search_keywords" +
                " ,LATERAL TABLE(split_word(words))");

        tableEnv.createTemporaryView("view_split_search_keywords", user_page_log);

        // 10-second tumbling-window keyword counts. Fixes vs. original:
        //  - TUMBLE_START/TUMBLE_END return TIMESTAMP, so format with DATE_FORMAT
        //    (FROM_UNIXTIME expects a BIGINT of epoch seconds);
        //  - format pattern had minutes and seconds swapped ('HH:ss:mm');
        //  - `end` is a reserved keyword; use `edt` (matches SearchKeyWord.getEdt());
        //  - missing space before "group by" concatenated into "keywordsgroup by";
        //  - GROUP BY must use the source column `keyword`, not its SELECT alias.
        // NOTE(review): aliases must match SearchKeyWord's field names for
        // toAppendStream POJO mapping — `count` inferred from getCount(); verify.
        Table result2 = tableEnv.sqlQuery("select " +
                "'SEARCH' source," +
                "DATE_FORMAT(TUMBLE_START(rowtime, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') stt," +
                "DATE_FORMAT(TUMBLE_END(rowtime, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') edt," +
                "keyword word," +
                "count(*) `count`," +
                "UNIX_TIMESTAMP()*1000 ts " +
                "from view_split_search_keywords " +
                "group by keyword," +
                "TUMBLE(rowtime, INTERVAL '10' SECOND)");

        DataStream<SearchKeyWord> keywordStatsDataStream = tableEnv.toAppendStream(result2, SearchKeyWord.class);
        keywordStatsDataStream.print();

        // ClickHouse sink. Fixes vs. original:
        //  - INSERT listed 5 columns but bound 6 parameters: the `source` column
        //    was missing and the column order did not match the bind order;
        //  - withBatchSize(0) (with the default 0ms flush interval) buffers rows
        //    indefinitely, so nothing ever reached ClickHouse.
        keywordStatsDataStream.addSink(
                JdbcSink.sink("insert into keyword_stats_yk02 (keyword,ct,source,stt,edt,ts) " +
                                "values(?,?,?,?,?,?)",
                        new JdbcStatementBuilder<SearchKeyWord>() {
                            @Override
                            public void accept(PreparedStatement preparedStatement, SearchKeyWord searchKeyWord) throws SQLException {
                                preparedStatement.setObject(1, searchKeyWord.getWord());
                                preparedStatement.setObject(2, searchKeyWord.getCount());
                                preparedStatement.setObject(3, searchKeyWord.getSource());
                                preparedStatement.setObject(4, searchKeyWord.getStt());
                                preparedStatement.setObject(5, searchKeyWord.getEdt());
                                preparedStatement.setObject(6, searchKeyWord.getTs());
                            }
                        },
                        JdbcExecutionOptions.builder()
                                .withBatchSize(5)            // flush every 5 rows...
                                .withBatchIntervalMs(1000)   // ...or at least once per second
                                .build(),
                        new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                                .withDriverName("ru.yandex.clickhouse.ClickHouseDriver")
                                .withUrl("jdbc:clickhouse://hadoop-single:8123/default")
                                .build()
                )
        );

        // Job name now matches the class (was "FlinkDemo02").
        env.execute("FlinkDemo04");
    }
}
