package com.test.ip;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * Flink batch job that resolves login IP addresses to provinces via a
 * Hive range-join and writes the results to a partitioned Hive table.
 *
 * @create 2023-07-13 23:39
 **/
public class Q3 {
    public static void main(String[] args) {
        // Impersonate "root" for HDFS/Hive access; must be set before the
        // catalog touches the cluster.
        System.setProperty("HADOOP_USER_NAME", "root");

        EnvironmentSettings settings = EnvironmentSettings
                .newInstance()
                .inBatchMode()
                .build();
        TableEnvironment tableEnv = TableEnvironment.create(settings);

        // Create the Hive catalog. The config dir is a relative path, so
        // hive-site.xml is only found when the job is launched from the
        // project root — NOTE(review): consider an absolute/configurable path.
        String catalogName = "myhive";
        HiveCatalog hiveCatalog = new HiveCatalog(
                catalogName,           // catalog name
                "default",             // default database
                "src/main/resources",  // dir containing hive-site.xml
                "3.1.2"                // Hive version
        );
        // Register and activate the catalog so unqualified table names
        // (test.login_data_dt, ...) resolve against Hive.
        tableEnv.registerCatalog(catalogName, hiveCatalog);
        tableEnv.useCatalog(catalogName);

        ipProvinceParse(tableEnv);
    }

    /**
     * Resolves each login record's IP to a province by range-joining against
     * the {@code test.ip_china_sequence} lookup table and inserts the result,
     * plus a 10-character date column {@code dt}, into
     * {@code test.login_data_province_dt}.
     *
     * @param tableEnv the table environment with the Hive catalog active
     */
    private static void ipProvinceParse(TableEnvironment tableEnv) {
        // UDF converting a dotted-quad IP string to its numeric (long) form,
        // comparable against the lookup table's long_ip_start/long_ip_end.
        tableEnv.createTemporaryFunction("get_long_ip", LongIp.class);

        // FIX: SQL SUBSTRING positions are 1-based. With a start of 0 and
        // length 10 the standard (and Flink/Calcite) returns characters 1..9,
        // i.e. "yyyy-MM-d" — a truncated 9-char dt. Starting at 1 yields the
        // intended 10-character date prefix.
        tableEnv.executeSql("insert into test.login_data_province_dt \n" +
                "select t1.logtime, t1.account_id, t1.ip, t2.province, substring(cast(logtime as string),1,10) as dt \n" +
                "from test.login_data_dt t1 \n" +
                "left join test.ip_china_sequence t2 \n" +
                "ON (get_long_ip(t1.ip) BETWEEN t2.long_ip_start AND t2.long_ip_end)");
    }
}
