package com.dada.cn.paimon;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.runtime.state.memory.MemoryStateBackend;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class BatchWrite {

    /**
     * Demo Flink job that writes a single row into an Apache Paimon table.
     *
     * <p>Steps: build a streaming environment with checkpointing (Paimon commits
     * written files when a checkpoint completes), register a filesystem-based
     * Paimon catalog, drop/recreate the {@code test} table, then run one
     * {@code INSERT}.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Very generous heartbeat timeout so TaskManagers are not killed while
        // stepping through the job in a debugger.
        conf.setInteger("heartbeat.timeout", 5000000);
        // Fix: the Configuration was previously constructed but never handed to
        // the environment (no-arg getExecutionEnvironment() was called), so the
        // heartbeat override silently had no effect.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);
        env.setRuntimeMode(RuntimeExecutionMode.STREAMING);
        // NOTE(review): FsStateBackend is deprecated in recent Flink releases —
        // consider HashMapStateBackend + checkpoint storage when upgrading.
        env.setStateBackend(new FsStateBackend("file:/opt/soft/checkpoint/paimon"));
        // Checkpoint every 60s; Paimon snapshots become visible on checkpoint.
        env.enableCheckpointing(60000);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Filesystem catalog: table metadata and data live under the warehouse dir.
        String catalog = "CREATE CATALOG paimon_file WITH (\n" +
                "  'type' = 'paimon',\n" +
                "  'warehouse' = 'file:///opt/soft/paimon'\n" +
                ");";
        String useCatalog = "USE CATALOG paimon_file;\n";
        // Recreate the table on every run so the demo starts from a clean state.
        String deleteTable = "DROP TABLE IF EXISTS test;\n";
        String createTable = "CREATE TABLE IF NOT EXISTS test (\n" +
                "    user_id BIGINT,\n" +
                "    item_id BIGINT,\n" +
                "    behavior STRING,\n" +
                "    dt STRING,\n" +
                "    hh STRING,\n" +
                "    PRIMARY KEY (user_id) NOT ENFORCED\n" +
                ")" +
                "WITH (\n" +
                "    'bucket' = '1',\n" +
                "    'bucket-key' = 'user_id', \n" +
                // Aggressive snapshot expiry (keep 1-2 snapshots, 1s retention)
                // so expiration behavior is easy to observe in this demo.
                "    'snapshot.num-retained.min' = '1', \n" +
                "    'snapshot.num-retained.max' = '2', \n" +
                "    'snapshot.time-retained' = '1s', \n" +
                // 'input' changelog producer: writers emit the input records
                // directly as the changelog.
                "    'changelog-producer' = 'input', \n" +
                "    'continuous.discovery-interval' = '1s',\n " +
                "    'file.format' = 'parquet' \n" +
                ") ;";

        String insert = "INSERT into test VALUES(1,1,'order','2023-07-01','1');";

        tEnv.executeSql(catalog);
        tEnv.executeSql(useCatalog);
        tEnv.executeSql(deleteTable);
        tEnv.executeSql(createTable);
        tEnv.executeSql(insert);
    }

}
