package com.zyx.ip2region.hiveudf;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;

/**
 * Hive UDF: given a primitive-typed value, returns an int length
 * (the string length of the value plus the size of the cached ip2region.db).
 *
 * @author zyx
 * @since 2021/9/1 21:37
 */
public class LengthUdf extends GenericUDF {

    private static InputStream in;
    private static byte[] data;
    public static final String DB_PATH = "hdfs:///user/hive/jars/ip2region.db";

    static {
        // 加载数据
        ByteArrayOutputStream out = null;
        try {
            Configuration configuration = new Configuration();
            FileSystem fileSystem = FileSystem.get(URI.create(DB_PATH), configuration);
            in = fileSystem.open(new Path(DB_PATH));
            out = new ByteArrayOutputStream();
            byte[] buffer = new byte[1024];
            while (in.read(buffer) != -1) {
                out.write(buffer);
            }
            // 提高性能, 将ip2region.db一次从hdfs中读取出来, 缓存到data字节数组中以重用
            // 避免每来一条数据读取一次ip2region.db
            data = out.toByteArray();
            out.close();
            in.close();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                if (out != null) {
                    try {
                        out.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
                if (in != null) {
                    in.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        // 判断参数个数是否正确
        if(arguments.length !=1){
            throw  new UDFArgumentLengthException("args num error please give me only one arg");
        }
        // 判断参数的类型是否正确
        if(!arguments[0].getCategory().equals(ObjectInspector.Category.PRIMITIVE)){
            throw  new UDFArgumentTypeException(1,"args type error please give me PRIMITIVE type");
        }
        return PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        Object o = arguments[0].get();
        if (o==null){
            return 0;
        }
        return o.toString().length() + data.length;
    }

    @Override
    public String getDisplayString(String[] strings) {
        return "";
    }

}
