package com.test.ip;


import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * @create: 2023-07-13 14:28
 * @author: Mr.Du
 * --------------
 * @notes:
 **/
public class LoginIpProvinceDemo {

    public static void main(String[] args) {

        // Run Hive/HDFS operations as root.
        System.setProperty("HADOOP_USER_NAME", "root");

        EnvironmentSettings settings = EnvironmentSettings
                .newInstance()
                .inBatchMode()
                .build();
        Configuration conf = settings.toConfiguration();
        conf.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, MemorySize.ofMebiBytes(256));
        // NOTE(review): mutating `conf` after `settings` was built may not propagate to the
        // environment created from `settings` below — confirm, or build settings from `conf`.
        Test2.initial(conf);
        TableEnvironment tableEnv = TableEnvironment.create(settings);

        // Create and register the Hive catalog (hive-site.xml expected under src/main/resources).
        String catalogName = "myhive";
        HiveCatalog hiveCatalog = new HiveCatalog(
                catalogName,
                "default",
                "src/main/resources",
                "3.1.2"
        );
        tableEnv.registerCatalog(catalogName, hiveCatalog);
        tableEnv.useCatalog(catalogName);

        // Q3: resolve each login IP to a province (disabled; enable to re-populate the table).
        //ipProvinceParse(tableEnv);

        // Q4: distinct login count per day and province.
        getDtDistinctCnt(tableEnv);

        // Q5: earliest 3 logins per province (disabled).
        //getEarliestTop3Province(tableEnv);
    }

    /**
     * Q5: for each province, prints the 3 earliest logins (account id and login time),
     * pivoted into columns via the custom COLLECT_LIST aggregate (Flink arrays are 1-based).
     *
     * @param tableEnv table environment with the {@code myhive} catalog active
     */
    private static void getEarliestTop3Province(TableEnvironment tableEnv) {
        tableEnv.createTemporaryFunction("collect_list", CollectionList.class);
        // BUG FIX: the Table returned by sqlQuery() was previously discarded, so the query
        // never ran. Execute it and print the result, consistent with getDtDistinctCnt.
        tableEnv.sqlQuery("SELECT t.province, \n" +
                "    COLLECT_LIST(cast(t.account_id as string))[1] as account_id_1, \n" +
                "    COLLECT_LIST(cast(t.logtime as string))[1] as login_time_1, \n" +
                "    COLLECT_LIST(cast(t.account_id as string))[2] as account_id_2, \n" +
                "    COLLECT_LIST(cast(t.logtime as string))[2] as login_time_2, \n" +
                "    COLLECT_LIST(cast(t.account_id as string))[3] as account_id_3, \n" +
                "    COLLECT_LIST(cast(t.logtime as string))[3] as login_time_3\n" +
                "FROM (\n" +
                "    SELECT province, account_id, logtime, \n" +
                "    ROW_NUMBER() OVER(PARTITION BY province order by logtime ASC) as rownum\n" +
                "    FROM test.login_data_province_dt ) t\n" +
                "    WHERE t.rownum <=3\n" +
                "    GROUP BY t.province")
                .execute()
                .print();
    }

    /**
     * Q4: prints the number of distinct accounts that logged in, per day and province.
     *
     * @param tableEnv table environment with the {@code myhive} catalog active
     */
    private static void getDtDistinctCnt(TableEnvironment tableEnv) {
        TableResult result = tableEnv.executeSql(
                "SELECT dt,province,count(distinct account_id) as cnt_login \n" +
                "FROM test.login_data_province_dt \n" +
                "GROUP BY dt, province");
        result.print();
    }

    /**
     * Q3: resolves each login IP to a province by range-joining the numeric IP against
     * {@code test.ip_china_sequence}, and writes the enriched rows (plus a {@code dt}
     * day partition derived from the login time) into {@code test.login_data_province_dt}.
     *
     * @param tableEnv table environment with the {@code myhive} catalog active
     */
    private static void ipProvinceParse(TableEnvironment tableEnv) {
        // UDF converting dotted-quad IP strings to their numeric value for range comparison.
        tableEnv.createTemporaryFunction("get_long_ip", LongIp.class);

        // BUG FIX: Flink SQL SUBSTRING is 1-based; a start position of 0 yielded only
        // 9 characters of the date (e.g. "2023-07-1"). Start at 1 for the full yyyy-MM-dd.
        tableEnv.executeSql("insert into test.login_data_province_dt \n" +
                "select t1.logtime, t1.account_id, t1.ip, t2.province, substring(cast(logtime as string),1,10) as dt \n" +
                "from test.login_data_dt t1 \n" +
                "left join test.ip_china_sequence t2 \n" +
                "ON (get_long_ip(t1.ip) BETWEEN t2.long_ip_start AND t2.long_ip_end)");
    }
}
