package com.dada.cn.paimon;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink streaming job that continuously syncs a Kafka topic carrying
 * debezium-json changelog records into a Paimon primary-key table.
 *
 * <p>Pipeline: {@code test_kafka} (Kafka source, in-memory catalog)
 * → {@code paimon_file.`default`.test} (Paimon file-warehouse sink).
 *
 * <p>NOTE(review): class name {@code k2p} violates UpperCamelCase convention
 * ("Kafka2Paimon" would be clearer), kept to avoid breaking external references.
 */
public class k2p {

    /** Checkpoint interval in milliseconds; Paimon commits snapshots on checkpoint. */
    private static final long CHECKPOINT_INTERVAL_MS = 60_000L;

    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.setRuntimeMode(RuntimeExecutionMode.STREAMING);
        env.enableCheckpointing(CHECKPOINT_INTERVAL_MS);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        // Disable the sink's upsert materialization operator; the sink table has a
        // primary key, so dedup/ordering is left to the table store itself.
        tEnv.getConfig().set("table.exec.sink.upsert-materialize", "NONE");

        registerKafkaSource(tEnv);
        registerPaimonSink(tEnv);

        // Continuous streaming insert: runs until the job is cancelled.
        tEnv.executeSql(
                "insert into paimon_file.`default`.test select * from my_catalog.`default`.test_kafka");
    }

    /**
     * Creates an in-memory catalog and registers the Kafka-backed source table
     * {@code test_kafka} (debezium-json changelog, read from earliest offset).
     */
    private static void registerKafkaSource(StreamTableEnvironment tEnv) {
        String memCatalog = "CREATE CATALOG my_catalog WITH(\n" +
                "    'type' = 'generic_in_memory',\n" +
                "    'default-database' = 'default'\n" +
                ");";

        String src = "CREATE TABLE test_kafka(\n" +
                "    id BIGINT,\n" +
                "    name STRING,\n" +
                "    age BIGINT\n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'test',\n" +
                "    'properties.group.id' = 'test6',\n" +
                "    'properties.bootstrap.servers' = 'localhost:9092',\n" +
                "    'scan.startup.mode' = 'earliest-offset',\n" +
                "    'format' = 'debezium-json'\n" +
                ");";

        tEnv.executeSql(memCatalog);
        tEnv.executeSql("USE CATALOG my_catalog");
        tEnv.executeSql(src);
    }

    /**
     * Creates the Paimon catalog over a local file warehouse, drops any stale
     * {@code test} table, and recreates it as a primary-key table with aggressive
     * snapshot expiration (min/max 1 snapshot retained, 10s time retention).
     */
    private static void registerPaimonSink(StreamTableEnvironment tEnv) {
        String catalog = "CREATE CATALOG paimon_file WITH (\n" +
                "  'type' = 'paimon',\n" +
                "  'warehouse' = 'file:///opt/soft/paimon'\n" +
                ");";

        String sink = "CREATE TABLE IF NOT EXISTS test (\n" +
                "    id BIGINT,\n" +
                "    name STRING,\n" +
                "    age BIGINT,\n" +
                "    PRIMARY KEY (id) NOT ENFORCED\n" +
                ")" +
                "WITH (\n" +
                "    'bucket' = '1',\n" +
                "    'bucket-key' = 'id', \n" +
                "    'snapshot.num-retained.min' = '1', \n" +
                "    'snapshot.num-retained.max' = '1', \n" +
                "    'snapshot.time-retained' = '10s', \n" +
                // 'input' keeps the raw changelog records as the table's changelog,
                // avoiding a costly changelog-normalize step for downstream readers.
                "    'changelog-producer' = 'input', \n" +
                "    'continuous.discovery-interval' = '10s'" +
                ");";

        tEnv.executeSql(catalog);
        tEnv.executeSql("USE CATALOG paimon_file;\n");
        // Drop first so schema/option changes in this job take effect on rerun.
        tEnv.executeSql("DROP TABLE IF EXISTS test;\n");
        tEnv.executeSql(sink);
    }
}
