package com.test.ip;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.table.functions.AggregateFunction;

import java.util.ArrayList;
import java.util.List;

/**
 * @create: 2023-07-13 23:43
 * --------------
 * @notes:
 **/
public class Q5 {

    /**
     * Entry point: wires a batch {@link TableEnvironment} to a Hive catalog,
     * then runs the "earliest top-3 logins per province" query.
     */
    public static void main(String[] args) {
        // Impersonate root for HDFS/Hive access.
        System.setProperty("HADOOP_USER_NAME","root");

        EnvironmentSettings envSettings = EnvironmentSettings
                .newInstance()
                .inBatchMode()
                .build();
        TableEnvironment tEnv = TableEnvironment.create(envSettings);

        // Build a Hive catalog: name, default database, hive-site.xml
        // location, and Hive version.
        String catalogName = "myhive";
        HiveCatalog catalog = new HiveCatalog(
                catalogName,
                "default",
                "src/main/resources",
                "3.1.2"
        );

        // Register the catalog and make it the session default.
        tEnv.registerCatalog(catalogName,catalog);
        tEnv.useCatalog(catalogName);

        getEarliestTop3Province(tEnv);
    }

    /**
     * For each province, prints the three earliest logins (account id and
     * login time) as one row per province.
     *
     * <p>The inner query ranks logins per province by {@code logtime}
     * ascending via {@code ROW_NUMBER()}; the outer query keeps ranks 1-3
     * and pivots them into columns with the user-defined {@code collect_list}
     * aggregate (Flink SQL arrays are 1-based).
     *
     * @param env table environment already bound to the Hive catalog
     */
    private static void getEarliestTop3Province(TableEnvironment env) {
        // Register the UDF backing COLLECT_LIST (function lookup is
        // case-insensitive in Flink SQL).
        env.createTemporaryFunction("collect_list", CollectionList.class);

        String sql = String.join("\n",
                "SELECT t.province, ",
                "    COLLECT_LIST(cast(t.account_id as string))[1] as account_id_1, ",
                "    COLLECT_LIST(cast(t.logtime as string))[1] as login_time_1, ",
                "    COLLECT_LIST(cast(t.account_id as string))[2] as account_id_2, ",
                "    COLLECT_LIST(cast(t.logtime as string))[2] as login_time_2, ",
                "    COLLECT_LIST(cast(t.account_id as string))[3] as account_id_3, ",
                "    COLLECT_LIST(cast(t.logtime as string))[3] as login_time_3",
                "FROM (",
                "    SELECT province, account_id, logtime, ",
                "    ROW_NUMBER() OVER(PARTITION BY province order by logtime ASC) as rownum",
                "    FROM test.login_data_province_dt ) t",
                "    WHERE t.rownum <=3",
                "    GROUP BY t.province");

        TableResult result = env.executeSql(sql);
        result.print();
    }
}

