package org.kfive.nova;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;

/**
 * @program: nova
 * @author: zhanwu.lzw
 * @create: 2021-12-09 21:07
 **/
public class CdnLog {

    /**
     * Flink Table API job: reads JSON CDN log records from a Kafka topic and
     * streams them into a MySQL table through the JDBC connector.
     *
     * <p>Pipeline: {@code kafka_source} (topic {@code cdn-log}) → {@code mysql_sink}
     * (table {@code cdn_log}).
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        // Kafka source. Each record is a JSON object, e.g. {"msg": "welcome flink users..."}
        // NOTE(review): 'kafka-0.11' is the legacy version-specific connector (removed in
        // Flink 1.12); the universal 'kafka' connector is the recommended identifier —
        // confirm against the target cluster's Kafka/Flink versions before changing.
        String sourceDDL = "CREATE TABLE kafka_source (\n" +
            " msg STRING\n" +
            ") WITH (\n" +
            " 'connector' = 'kafka-0.11',\n" +
            " 'topic' = 'cdn-log',\n" +
            " 'properties.bootstrap.servers' = 'localhost:9092',\n" +
            " 'format' = 'json',\n" +
            " 'scan.startup.mode' = 'latest-offset'\n" +
            ")";

        // MySQL sink via the JDBC connector.
        // 'sink.buffer-flush.max-rows' = '1' flushes every row — fine for a demo,
        // but raise it for any real throughput.
        // NOTE(review): database name 'finkdb' looks like a typo for 'flinkdb' —
        // verify against the actual schema before renaming.
        String sinkDDL = "CREATE TABLE mysql_sink (\n" +
            " msg STRING \n" +
            ") WITH (\n" +
            "  'connector' = 'jdbc',\n" +
            "   'url' = 'jdbc:mysql://localhost:3306/finkdb?characterEncoding=utf-8&useSSL=false',\n" +
            "   'table-name' = 'cdn_log',\n" +
            "   'username' = 'root',\n" +
            "   'password' = '123456',\n" +
            "   'sink.buffer-flush.max-rows' = '1'\n" +
            ")";

        // Blink planner in streaming mode (the only planner from Flink 1.14 on).
        EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        TableEnvironment tEnv = TableEnvironment.create(settings);

        // DDL statements are executed eagerly and register both tables in the catalog.
        tEnv.executeSql(sourceDDL);
        tEnv.executeSql(sinkDDL);

        Table sourceTable = tEnv.from("kafka_source");

        // executeInsert() submits the INSERT pipeline directly and replaces the
        // deprecated insertInto(...) + TableEnvironment.execute(...) pair, which
        // was removed in later Flink 1.x releases. await() (Flink 1.12+) blocks
        // the client so a local run keeps the streaming job alive.
        sourceTable.executeInsert("mysql_sink").await();
    }
}
