package com.example.sql;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;

import static org.apache.flink.table.api.Expressions.$;

/**
 * Flink Table API / SQL demos wiring together Kafka (plain and upsert)
 * and MySQL (JDBC) sources and sinks.
 *
 * <p>{@link #main} runs the per-site aggregation demo; the other private
 * methods are self-contained examples kept for reference.
 *
 * <p>Author wangJinLong, Date 2025/8/5 15:50.
 **/
public class FlinkSqlKafka {

    /** Kafka topic carrying raw user-capture events, JSON encoded. */
    private static final String TOPIC = "data-user";
    /** CSV-encoded variant of the source topic (reserved, currently unused). */
    private static final String TOPIC_CSV = "data-user-csv";
    /** Kafka bootstrap servers shared by every demo. */
    private static final String BOOTSTRAP_SERVERS = "106.54.174.109:19092";
    /** JDBC connection URL of the demo MySQL database. */
    private static final String JDBC_URL = "jdbc:mysql://106.54.174.109:13306/test";
    // NOTE(review): broker address and the MySQL credentials below are
    // hard-coded for demo purposes only — externalize before real use.

    public static void main(String[] args) throws Exception {
        aggregatePerSite();
    }

    /** Builds a streaming-mode TableEnvironment (common to all demos). */
    private static TableEnvironment streamingEnv() {
        return TableEnvironment.create(EnvironmentSettings.inStreamingMode());
    }

    /**
     * Registers the Kafka-backed {@code transactions} source table.
     * Shared by the demos that previously duplicated this DDL.
     */
    private static void registerTransactions(TableEnvironment tEnv) {
        tEnv.executeSql("CREATE TABLE transactions (\n" +
                "    featuresId  STRING,\n" +
                "    siteSn      STRING,\n" +
                "    captureTime BIGINT\n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic'     = '" + TOPIC + "',\n" +
                "    'scan.startup.mode'     = 'earliest-offset',\n" +
                "    'properties.bootstrap.servers' = '" + BOOTSTRAP_SERVERS + "',\n" +
                "    'value.format'    = 'json',\n" +
                "    'properties.group.id'    = 'group1'\n" +
                ")");
    }

    /**
     * Counts events per (featuresId, siteSn) from Kafka, upserts the counts
     * into the {@code per_siteSn} upsert-kafka topic, then reads that topic
     * back as a plain Kafka table and prints it (blocks in {@code print()}).
     */
    private static void aggregatePerSite() {
        TableEnvironment tEnv = streamingEnv();

        // Upsert sink: the primary key lets the grouped COUNT(*) emit updates.
        tEnv.executeSql("CREATE TABLE per_siteSn (\n" +
                "  featuresId STRING,\n" +
                "  siteSn STRING,\n" +
                "  uv BIGINT,\n" +
                "  PRIMARY KEY (featuresId, siteSn) NOT ENFORCED\n" +
                ") WITH (\n" +
                "  'connector' = 'upsert-kafka',\n" +
                "  'topic' = 'per_siteSn',\n" +
                "  'properties.bootstrap.servers' = '" + BOOTSTRAP_SERVERS + "',\n" +
                "  'properties.allow.auto.create.topics' = 'true',\n" +
                "  'key.format' = 'json',\n" +
                "  'value.format' = 'json'\n" +
                ")");

        registerTransactions(tEnv);

        // Fixed: original concatenation dropped the newline after "siteSn,".
        tEnv.executeSql("INSERT INTO per_siteSn\n" +
                "SELECT\n" +
                "  featuresId,\n" +
                "  siteSn,\n" +
                "  COUNT(*)\n" +
                "FROM transactions\n" +
                "GROUP BY featuresId,siteSn");

        // Read the aggregated topic back with the plain kafka connector so the
        // raw upsert changelog values can be printed.
        tEnv.executeSql("CREATE TABLE read_per_siteSn (\n" +
                "    featuresId  STRING,\n" +
                "    siteSn      STRING,\n" +
                "    uv BIGINT\n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic'     = 'per_siteSn',\n" +
                "    'scan.startup.mode'     = 'latest-offset',\n" +
                "    'properties.bootstrap.servers' = '" + BOOTSTRAP_SERVERS + "',\n" +
                "    'value.format'    = 'json',\n" +
                "    'properties.group.id'    = 'group1'\n" +
                ")");

        tEnv.executeSql("select * from read_per_siteSn").print();
    }

    /**
     * Demo: read the {@code spend_report} MySQL table via JDBC and print it.
     */
    private static void readMysql() {
        TableEnvironment tEnv = streamingEnv();

        tEnv.executeSql("CREATE TABLE spend_report (\n" +
                "    featuresId STRING,\n" +
                "    siteSn     STRING,\n" +
                "    captureTime     STRING\n" +
                ") WITH (\n" +
                "   'connector'  = 'jdbc',\n" +
                "   'url'        = '" + JDBC_URL + "',\n" +
                "   'table-name' = 'spend_report',\n" +
                "   'driver'     = 'com.mysql.cj.jdbc.Driver',\n" +
                "   'username'   = 'root',\n" +
                "   'password'   = 'mysql@123456'\n" +
                ")");

        tEnv.executeSql("select * from spend_report").print();
    }

    /**
     * Demo: read from Kafka and upsert into the {@code spend_report} MySQL
     * table (primary key {@code featuresId} makes the JDBC sink an upsert).
     */
    private static void kafkaToMysql() {
        TableEnvironment tEnv = streamingEnv();

        registerTransactions(tEnv);

        // Fixed: original placed "\n" before the comma after BIGINT and omitted
        // the newline after the PRIMARY KEY clause.
        tEnv.executeSql("CREATE TABLE spend_report (\n" +
                "    featuresId STRING,\n" +
                "    siteSn     STRING,\n" +
                "    captureTime     BIGINT,\n" +
                "    PRIMARY KEY (featuresId) NOT ENFORCED\n" +
                ") WITH (\n" +
                "   'connector'  = 'jdbc',\n" +
                "   'url'        = '" + JDBC_URL + "',\n" +
                "   'table-name' = 'spend_report',\n" +
                "   'driver'     = 'com.mysql.cj.jdbc.Driver',\n" +
                "   'username'   = 'root',\n" +
                "   'password'   = 'mysql@123456'\n" +
                ")");

        Table transactions = tEnv.from("transactions");
        report(transactions).executeInsert("spend_report");
    }

    /**
     * Projects the columns forwarded from {@code transactions} to the report
     * sink. Public signature kept unchanged for external callers.
     *
     * @param transactions the source table to project
     * @return a table with featuresId, siteSn and captureTime columns
     */
    public static Table report(Table transactions) {
        return transactions.select(
                        $("featuresId"),
                        $("siteSn"),
                        $("captureTime"));
    }

    /**
     * Demo: read the raw {@code transactions} Kafka topic and print it.
     */
    private static void readKafka() {
        TableEnvironment tEnv = streamingEnv();

        registerTransactions(tEnv);

        tEnv.executeSql("select * from transactions").print();
    }

}
