package com.raylu.realtime.app.dws;

import com.raylu.realtime.app.func.ConvertTimeStampFunction;
import com.raylu.realtime.app.func.KeywordTableFunctionByIk;
import com.raylu.realtime.bean.KeywordStats;
import com.raylu.realtime.utils.ClickHouseUtil;
import com.raylu.realtime.utils.KafkaSourceUtil;
import com.raylu.realtime.utils.PropertiesUtil;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.util.Properties;

/**
 * Description: DWS keyword-statistics job — counts search keywords from page logs
 * over 5-second tumbling windows and writes the results to ClickHouse.
 * <p>
 * Created by lucienoz on 2022/1/13.
 * Copyright © 2022 lucienoz. All rights reserved.
 */
/**
 * Aggregates search keywords extracted from page-view logs into 5-second
 * tumbling windows and sinks the per-window keyword counts to ClickHouse.
 */
public class KeywordStatsApp {

    public static void main(String[] args) throws Exception {
        Properties props = PropertiesUtil.load("config.properties");

        //TODO 1. Set up the Flink streaming environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        //TODO 2. Checkpointing (currently disabled)
//        env.enableCheckpointing(5000L);
//        env.getCheckpointConfig().setCheckpointTimeout(60*1000L);
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, Time.seconds(3)));
//        env.setStateBackend(new FsStateBackend(props.getProperty("keyword.stats.app.fsstatebackend.url")));
//        System.setProperty("HADOOP_USER_NAME", "raylu");

        //TODO 3. Table environment on top of the stream environment
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        //TODO 4. Register UDFs: ts -> TIMESTAMP conversion and IK-analyzer keyword splitting
        tableEnv.createTemporaryFunction("to_timestamp", new ConvertTimeStampFunction());
        tableEnv.createTemporaryFunction("keyword", new KeywordTableFunctionByIk());

        //TODO 5. Declare the Kafka page-log topic as a dynamic table with an
        //        event-time attribute (rowtime) and a 3-second watermark
        String pageLogDdl =
                "CREATE TABLE PAGE_LOG (\n" +
                        "  during_time     STRING,\n" +
                        "  item            STRING,\n" +
                        "  item_type       STRING,\n" +
                        "  last_page_id    STRING,\n" +
                        "  page_id         STRING,\n" +
                        "  source_type     STRING,\n" +
                        "  ar              STRING,\n" +
                        "  ba              STRING,\n" +
                        "  ch              STRING,\n" +
                        "  is_new          STRING,\n" +
                        "  md              STRING,\n" +
                        "  mid             STRING,\n" +
                        "  os              STRING,\n" +
                        "  uid             STRING,\n" +
                        "  vc              STRING,\n" +
                        "  ts\t\t\t\t      BIGINT,\n" +
                        "    rowtime as to_timestamp(ts),\n" +
                        "    WATERMARK FOR rowtime AS rowtime - INTERVAL '3' SECOND\n" +
                        ") WITH (\n" +
                        KafkaSourceUtil.getConnectorDDl(
                                props.getProperty("keyword.stats.app.kafka.source-topic1"),
                                props.getProperty("keyword.stats.app.kafka.group-id"),
                                "json") +
                        ")";
        tableEnv.executeSql(pageLogDdl);

        //TODO 6. Split the search term ("item") into keywords via the UDTF and
        //        expose the exploded rows as a view
        String keywordViewDdl =
                "create view keyword_table" +
                        " as " +
                        "select T.keyword as keyword," +
                        "       rowtime " +
                        "from PAGE_LOG a,LATERAL TABLE(keyword(item)) as T " +
                        "where a.page_id = 'good_list' " +
                        "  and item is not null ";
        tableEnv.executeSql(keywordViewDdl);

        // Per-keyword count over 5-second tumbling windows; "source" is left blank.
        Table windowedCounts = tableEnv.sqlQuery(
                "select TUMBLE_START(rowtime, INTERVAL '5' SECOND) as stt," +
                        "  TUMBLE_END(rowtime, INTERVAL '5' SECOND) as edt, " +
                        "  keyword," +
                        "  '' as source," +
                        "  count(0) as cnt " +
                        " from keyword_table " +
                        " group by keyword,TUMBLE(rowtime, INTERVAL '5' SECOND)");

        DataStream<KeywordStats> statsStream =
                tableEnv.toAppendStream(windowedCounts, KeywordStats.class);

        //TODO 7. Debug print, then write the windowed stats into ClickHouse
        statsStream.print();
        statsStream.addSink(ClickHouseUtil.getJdbcSink(
                "INSERT INTO gmall.dws_keyword_stats (stt, edt, keyword, source, cnt) VALUES (?, ?, ?, ?, ?)"));

        env.execute();
    }
}
