package org.example.append_table;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;

/**
 * Demo:
 * 1. Runs Flink in batch mode against a Paimon catalog on the local filesystem.
 * 2. Rewrites log records: rows with level = 'ERROR' are updated to 'WARN'.
 * 3. Prints the row count of LogTable before and after the statement.
 *
 * NOTE(review): despite the class name and the original comments, this program
 * performs an UPDATE, not a DELETE — the row count is expected to be unchanged;
 * only the 'level' column values change.
 */
public class UpdateAndDeleteRows {

    public static void main(String[] args) throws Exception {
        // Flink Web UI port. The Configuration must be handed to the
        // EnvironmentSettings builder below — creating it alone has no effect
        // (in the original code it was built but never used).
        Configuration config = new Configuration();
        config.setString("rest.bind-port", "8081"); // Web UI on port 8081

        // Batch-mode Table environment carrying the configuration above.
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inBatchMode()
                .withConfiguration(config)
                .build();
        TableEnvironment tableEnv = TableEnvironment.create(settings);

        // Register the Paimon catalog backed by the local filesystem.
        String createCatalogSQL = "CREATE CATALOG paimon WITH (\n" +
                "    'type' = 'paimon',\n" +
                "    'warehouse' = 'file:///tmp/paimon'\n" +
                ");";
        executeAndLog(tableEnv, createCatalogSQL);

        // Row count before the update.
        String countBeforeSQL = "SELECT COUNT(*) AS before_count FROM `paimon`.`default`.`LogTable`;";
        executeAndLog(tableEnv, countBeforeSQL).print();

        // Run the UPDATE and wait for the batch job to finish; without
        // await() the DML is submitted asynchronously and the following
        // count could execute before the job completes.
        String updateSQL = "UPDATE `paimon`.`default`.`LogTable` SET level = 'WARN' WHERE level = 'ERROR';";
        executeAndLog(tableEnv, updateSQL).await();

        // Row count after the update.
        String countAfterSQL = "SELECT COUNT(*) AS after_count FROM `paimon`.`default`.`LogTable`;";
        executeAndLog(tableEnv, countAfterSQL).print();
    }

    /**
     * Logs the SQL statement about to run, then executes it on the given
     * environment.
     *
     * @param tableEnv the table environment to execute against
     * @param sql      the SQL statement to run
     * @return the {@link TableResult} of the execution
     */
    private static TableResult executeAndLog(TableEnvironment tableEnv, String sql) {
        System.out.println("正在执行SQL: \n" + sql);
        return tableEnv.executeSql(sql);
    }
}
