package com.zyx.ip2region.hiveudf;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.lionsoul.ip2region.DbConfig;
import org.lionsoul.ip2region.DbSearcher;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * @author zyx
 * @since 2021/9/1 21:37
 * 1) 首先将 `ip2region.db` 置于HDFS指定路径, 例如 /user/hive/jars/ip2region.db
 * 2) 创建函数
 *  2.1 创建永久函数（指定库中创建）, 重启后任何时刻只要是查询该库 均可以使用
 *      hive > use your_db;
 *      hive > create function func_name as 'xxx.Ip2regionUdf' using jar 'hdfs:///user/hive/jars/xxx.jar';
 *  2.2 创建临时函数（可以在任意地方创建）, 退出当前会话后失效
 *      hive > create temporary function func_name as 'xxx.Ip2regionUdf' using jar 'hdfs:///user/hive/jars/xxx.jar';
 * 3) 如果创建永久函数需要重启hiveserver2
 * 4) 函数相关指令
 *  4.1 查看函数
 *      hive > desc function func_name;
 *  4.2 查看函数详细信息
 *      hive > desc function extended func_name;
 *  4.3 删除临时函数
 *      hive > drop temporary function func_name;
 *  4.4 删除永久函数
 *      hive > drop function func_name;
 * 5) 案例
 *      hive > select myip2region("124.236.223.17");
 *      hive > select myip2region("124.236'.223``.17");   -- 非法IP, 返回NULL
 */
public class Ip2RegionUdf extends GenericUDF {

    /** ip2region.db 的全部字节, 静态代码块加载一次后缓存复用; 加载失败时为 null. */
    private static byte[] data;
    private static final String DB_PATH = "hdfs:///user/hive/jars/ip2region.db";
    // 必须用 "+" 而不是 "*": "*" 会匹配空串, 例如 "1..2.3" 按 "." 切分会产生 "",
    // 空串通过校验后 Integer.parseInt("") 会抛出 NumberFormatException 导致查询失败
    private static final Pattern PATTERN = Pattern.compile("[0-9]+");

    static {
        // 提高性能, 将ip2region.db一次从hdfs中读取出来, 缓存到data字节数组中以重用
        // 避免每来一条数据读取一次ip2region.db
        try {
            Configuration configuration = new Configuration();
            FileSystem fileSystem = FileSystem.get(URI.create(DB_PATH), configuration);
            // try-with-resources 保证两个流一定被关闭, 无需手动 finally/二次 close
            try (InputStream in = fileSystem.open(new Path(DB_PATH));
                 ByteArrayOutputStream out = new ByteArrayOutputStream()) {
                byte[] buffer = new byte[1024];
                int n;
                // 只写入本次实际读到的 n 个字节; 若直接 out.write(buffer),
                // 最后一次不足 1024 字节的读取会把上一轮的残留字节写进去, 污染缓存数据
                while ((n = in.read(buffer)) != -1) {
                    out.write(buffer, 0, n);
                }
                data = out.toByteArray();
            }
        } catch (Exception e) {
            // 加载失败时 data 保持 null, getRegion() 对每条数据返回 null 而不是让查询崩溃
            e.printStackTrace();
        }
    }

    /**
     * 初始化方法, 判断参数个数及类型是否正确, 并指定返回类型
     *
     * @param arguments 传入的参数
     * @return 返回参数的检查器
     * @throws UDFArgumentException UDF参数异常
     */
    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        // 判断参数个数是否正确
        if (arguments.length != 1) {
            throw new UDFArgumentLengthException("args num error please give me only one arg");
        }
        // 判断参数的类型是否正确; 参数下标从 0 开始, 第一个参数应报 0 而不是 1
        if (!arguments[0].getCategory().equals(ObjectInspector.Category.PRIMITIVE)) {
            throw new UDFArgumentTypeException(0, "args type error please give me PRIMITIVE type");
        }
        return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    }

    /**
     * Hive处理数据核心逻辑: 合法ip返回区域字符串, 非法ip或null返回null
     *
     * @param arguments Hive函数传入的参数
     * @return 返回结果
     * @throws HiveException Hive异常
     */
    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        Object o = arguments[0].get();
        if (o == null) {
            return null;
        }
        String ip = o.toString();
        if (isIpAddress(ip)) {
            return getRegion(ip);
        } else {
            return null;
        }
    }

    /**
     * EXPLAIN 时展示的函数调用形式
     *
     * @param strings 子表达式的展示串
     * @return 形如 ip2region(expr) 的描述, 便于排查执行计划
     */
    @Override
    public String getDisplayString(String[] strings) {
        return "ip2region(" + String.join(", ", strings) + ")";
    }

    /**
     * 在内存缓存的 data 上解析ip地址为区域字符串
     *
     * @param ip 已通过 isIpAddress 校验的ip
     * @return 区域字符串; 数据未加载或解析异常时返回 null
     */
    public static String getRegion(String ip) {
        // 静态代码块加载失败时 data 为 null, 直接返回 null 避免 NPE
        if (data == null) {
            return null;
        }
        String region = null;
        try {
            // 创建ip2region的searcher, 基于内存中的 data 做 memorySearch
            DbConfig config = new DbConfig();
            DbSearcher searcher = new DbSearcher(config, data);

            // 解析ip地址为指定区域
            region = searcher.memorySearch(ip).getRegion();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return region;
    }

    /**
     * 重写ip2region的Util包中的isIpAddress方法
     *
     * @param ip 传入的字符串
     * @return 传入字符串是否为ip
     */
    public static boolean isIpAddress(String ip) {
        String[] p = ip.split("\\.");
        if (p.length != 4) {
            return false;
        }
        for (String pp : p) {
            if (pp.length() > 3) {
                return false;
            }
            // 先判断是否为非空纯数字, 否则parseInt方法可能会报错
            if (!isPositiveInteger(pp)) {
                return false;
            }
            int val = Integer.parseInt(pp);
            if (val > 255) {
                return false;
            }
        }
        return true;
    }

    /**
     * 判断字符串是否为非空的纯数字串（不含符号、小数点）
     *
     * @param str 待检查的字符串
     * @return 是否整串匹配 [0-9]+
     */
    public static boolean isPositiveInteger(String str) {
        Matcher isNum = PATTERN.matcher(str);
        return isNum.matches();
    }

}
