package ex.tableapi;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.Tumble;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.lit;

/**
 * Reads transactions from Kafka, transforms them, and stores the result in MySQL.
 *
 * <p>TODO: with MySQL as a source, queries work locally, but this job must be packaged
 * and submitted to the cluster as a jar to verify, e.g.:
 *
 * <pre>
 * [root@master flink1]# ./bin/flink run examples/streaming/ex-0.0.1-SNAPSHOT-jar-with-dependencies.jar
 * (
 * `account_id` BIGINT,
 * `amount` BIGINT
 * )
 * Job has been submitted with JobID ec179184104ad019aec05185e8bc5f7f
 * [root@master flink1]#
 * </pre>
 */
public class KafkaToMysqlDemo2 {

    /**
     * Entry point: reads CSV transaction records from Kafka, sums {@code amount}
     * per {@code account_id} over 1-minute processing-time tumbling windows, and
     * upserts the aggregates into MySQL through the JDBC connector.
     *
     * <p>Submitting the insert via {@code execute()} launches the streaming job;
     * no separate {@code env.execute()} call is needed for Table API pipelines.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setRuntimeMode(RuntimeExecutionMode.STREAMING);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Kafka source: CSV rows of (account_id, amount, transaction_time).
        // `pt` is a processing-time attribute; the tumbling window below is keyed
        // on it so results are emitted even when event-time watermarks stall.
        // The watermark on transaction_time is kept for event-time experiments.
        tEnv.executeSql("CREATE TABLE transactions (" +
                "    account_id  BIGINT," +
                "    amount      BIGINT," +
                "    transaction_time  TIMESTAMP(3)," +
                "    pt AS PROCTIME() ,"+
                "    WATERMARK FOR transaction_time AS transaction_time - INTERVAL '5' SECOND" +
                ") WITH (" +
                "    'connector' = 'kafka'," +
                "    'topic'     = 'test1'," +
                "    'properties.bootstrap.servers' = '192.168.1.6:9092'," +
                "    'properties.group.id' = 'testGroup'," +
                "    'scan.startup.mode' = 'earliest-offset'," +
                "    'format'    = 'csv'" +
                ")");

        // MySQL sink: the composite primary key makes the JDBC connector run in
        // upsert mode, so repeated window firings overwrite earlier results.
        // 'sink.buffer-flush.max-rows' = '0' disables row-count buffering.
        // NOTE(review): credentials are hardcoded — move to configuration/secrets
        // before any non-demo use.
        tEnv.executeSql("CREATE TABLE spend_report (" +
                "    account_id BIGINT," +
                "    log_ts     TIMESTAMP(3)," +
                "    amount     BIGINT," +
                "    PRIMARY KEY (account_id, log_ts) NOT ENFORCED" +
                ") WITH (" +
                "   'connector'  = 'jdbc'," +
                "   'url'        = 'jdbc:mysql://localhost:3306/test?characterEncoding=utf8&serverTimezone=Asia/Shanghai&useSSL=false'," +
                "   'table-name' = 'spend_report'," +
                "   'driver'     = 'com.mysql.cj.jdbc.Driver'," +
                "   'username'   = 'root'," +
                "   'password'   = 'liji@2024'," +
                "   'sink.buffer-flush.max-rows' = '0'" +
                ")");

        Table source = tEnv.from("transactions");

        // Transformation: 1-minute tumbling window on processing time, grouped
        // per account; the window start timestamp becomes the report's log_ts.
        Table result = source.window(Tumble.over(lit(1).minutes()).on($("pt")).as("log_ts"))
                .groupBy($("account_id"), $("log_ts"))
                .select(
                        $("account_id"),
                        $("log_ts").start().as("log_ts"),
                        $("amount").sum().as("amount"));

        // Log the resolved schema of the aggregate for debugging. (For ad-hoc
        // inspection of rows, a table with 'connector' = 'print' can be created
        // and targeted with result.executeInsert(...).)
        result.printSchema();

        // Submits the streaming job.
        result.insertInto("spend_report").execute();
    }

// Sample CSV input for the 'test1' topic:
//1,10,2024-01-24 19:04:19
//2,22,2024-01-24 19:04:19
//3,33,2024-01-24 19:04:24
//4,44,2024-01-24 19:35:51
//5,55,2024-01-25 19:04:19
//6,66,2024-01-25 19:04:29
//7,77,2024-01-25 19:03:55
//8,88,2024-01-25 19:05:08
//9,99,2024-01-25 19:35:15
//10,10,2024-01-25 21:05:26
//11,11,2024-01-25 21:35:36
//12,12,2024-01-26 19:05:07
}
