package org.example;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

// NOTE(review): class name has a typo ("Inset" -> "Insert") but renaming would
// break the filename/class binding and any external references, so it is kept.
public class FlinkBatchInsetUpdateDelete1 {
    /**
     * Demo entry point: creates a Paimon catalog on the local filesystem,
     * (re)creates a partitioned primary-key table, and inserts a single row.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if any of the executed Flink SQL statements fails
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Pin the Flink web UI / REST endpoint to a fixed port for local debugging.
        conf.setString(RestOptions.BIND_PORT, "8081");
        TableEnvironment tableEnv = TableEnvironment.create(conf);

        // Create the Paimon catalog; the warehouse lives on the local filesystem.
        tableEnv.executeSql("CREATE CATALOG paimon WITH (\n" +
                "    'type' = 'paimon',\n" +
                "    'warehouse' = 'file:///tmp/paimon'\n" +
                ");");

        // Drop first so repeated runs start from a clean table definition.
        tableEnv.executeSql("DROP TABLE IF EXISTS `paimon`.`default`.`user_behavior_with_pk`;");

        // Primary-key table partitioned by dt; the partition column must be part
        // of the primary key for Paimon PK tables.
        tableEnv.executeSql("-- 创建paimon主键表\n" +
                "CREATE TABLE `paimon`.`default`.`user_behavior_with_pk` (\n" +
                "    user_id STRING,\n" +
                "    item_id STRING,\n" +
                "    behavior BIGINT,\n" +
                "    dt STRING,\n" +
                "    eventtime TIMESTAMP_LTZ(3),\n" +
                "    WATERMARK FOR eventtime AS eventtime - INTERVAL '5' SECOND,\n" +
                "    PRIMARY KEY (dt, user_id) NOT ENFORCED\n" +
                ") \n" +
                "PARTITIONED BY (dt)\n" +
                ";\n");

        // FIX: the original literal '2025-01-13 16:54:42:000' used a colon before
        // the milliseconds; Flink's TO_TIMESTAMP could not parse it and returned
        // NULL, so eventtime was silently inserted as NULL. Use a '.' separator
        // and an explicit format string so the parse is unambiguous.
        tableEnv.executeSql("-- 插入一条数据\n" +
                "INSERT INTO `paimon`.`default`.`user_behavior_with_pk` partition(dt='2025-01-13') \n" +
                "(user_id, item_id, behavior, eventtime)\n" +
                "VALUES('00001', '00001插入测试', 1, TO_TIMESTAMP('2025-01-13 16:54:42.000', 'yyyy-MM-dd HH:mm:ss.SSS'))\n" +
                ";");
    }
}