package com.flink.streaming.sql.validation;


import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;


@Slf4j
public class SqlSelect {

    /** Name under which the Hive catalog is registered in the table environment. */
    private static final String CATALOG_NAME = "myhive";

    /** Default database inside the Hive catalog. */
    private static final String DEFAULT_DATABASE = "flink";

    /** Directory holding hive-site.xml (also used as the Hadoop conf dir). */
    private static final String HIVE_CONF_DIR = "/usr/hdp/3.0.1.0-187/hive/conf";

    /** Default bound on how long rows are collected before the iterator is closed. */
    private static final long DEFAULT_COLLECT_TIMEOUT_MS = 5000L;

    /**
     * Executes the given SQL statement against the registered Hive catalog and
     * collects result rows for up to {@link #DEFAULT_COLLECT_TIMEOUT_MS} milliseconds.
     *
     * @param sql the SQL statement to execute
     * @return a map with key {@code "jobId"} (the Flink JobID) and key
     *         {@code "data"} (a {@code List<Row>} of collected rows)
     * @author liuyun
     * @date 2022/07/26
     * @time 10:10
     */
    @SneakyThrows
    public static Map<String, Object> sqlSelect(String sql) {
        return sqlSelect(sql, DEFAULT_COLLECT_TIMEOUT_MS);
    }

    /**
     * Executes the given SQL statement and collects result rows for a bounded
     * amount of time. For an unbounded (streaming) query this returns whatever
     * rows arrived within the timeout; for a query that finishes earlier it
     * returns as soon as all rows are drained.
     *
     * @param sql              the SQL statement to execute
     * @param collectTimeoutMs maximum time in milliseconds to wait for rows
     * @return a map with key {@code "jobId"} (the Flink JobID) and key
     *         {@code "data"} (a {@code List<Row>} of collected rows)
     * @throws IllegalStateException if the statement produced no job client
     *                               (e.g. it was not a cluster-submitted query)
     */
    @SneakyThrows
    public static Map<String, Object> sqlSelect(String sql, long collectTimeoutMs) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        HiveCatalog catalog = new HiveCatalog(
                CATALOG_NAME,
                DEFAULT_DATABASE,
                HIVE_CONF_DIR,
                HIVE_CONF_DIR,
                null
        );
        tableEnv.registerCatalog(CATALOG_NAME, catalog);
        tableEnv.useCatalog(CATALOG_NAME);

        TableResult tableResult = tableEnv.executeSql(sql);

        Map<String, Object> result = new HashMap<>();
        // Fail with a clear message instead of an unchecked Optional.get().
        result.put("jobId", tableResult.getJobClient()
                .orElseThrow(() -> new IllegalStateException("No job client available for query: " + sql))
                .getJobID());

        // Synchronized list: written by the async drain task while this thread
        // may time out and read it concurrently.
        List<Row> rows = Collections.synchronizedList(new ArrayList<>());
        CloseableIterator<Row> iterator = tableResult.collect();
        CompletableFuture<Void> drain = CompletableFuture.runAsync(() -> {
            while (iterator.hasNext()) {
                rows.add(iterator.next());
            }
        });
        try {
            // Bounded wait replaces a fixed Thread.sleep: returns immediately when
            // the query completes, or gives a streaming query time to produce rows.
            drain.get(collectTimeoutMs, TimeUnit.MILLISECONDS);
        } catch (TimeoutException timedOut) {
            log.warn("Row collection did not finish within {} ms; closing result iterator", collectTimeoutMs);
        } finally {
            // Closing the iterator unblocks the drain task if it is still waiting
            // in hasNext(), and releases the result's resources on all paths.
            iterator.close();
        }
        result.put("data", rows);
        return result;
    }
}
