package HiveUDF;

import org.apache.hadoop.hive.ql.exec.UDF;

/**
 * Hive UDF that renders an integral value as its fixed-width big-endian
 * byte representation, formatted as an escaped hex string ("\xAB\xCD...").
 * Longs produce 8 bytes (16 hex digits), Integers produce 4 bytes.
 *
 * <p>Note: the lowercase class name is kept as-is because Hive registers
 * the UDF by class name; renaming would break existing CREATE FUNCTION DDL.
 *
 * @author CliffordChen
 * @create 2021-04-12 14:34
 */
public class toBytes extends UDF {

    /**
     * Converts a long to its 8-byte representation as "\xHH" pairs.
     *
     * @param value the value to convert; may be null
     * @return 16 uppercase hex digits grouped as eight "\xHH" tokens,
     *         or the empty string when {@code value} is null
     */
    public String evaluate(Long value) {
        if (value == null) {
            return "";
        }
        // %016X: zero-padded to 16 uppercase hex digits (8 bytes, big-endian).
        // Negative values render as their two's-complement bits, matching
        // Long.toHexString, which the previous implementation relied on.
        return toEscapedHex(String.format("%016X", value));
    }

    /**
     * Converts an int to its 4-byte representation as "\xHH" pairs.
     *
     * @param value the value to convert; may be null
     * @return 8 uppercase hex digits grouped as four "\xHH" tokens,
     *         or the empty string when {@code value} is null
     */
    public String evaluate(Integer value) {
        if (value == null) {
            return "";
        }
        // %08X: zero-padded to 8 uppercase hex digits (4 bytes, big-endian).
        return toEscapedHex(String.format("%08X", value));
    }

    /**
     * Groups an even-length uppercase hex string into "\xHH" byte tokens.
     *
     * @param hex even-length string of uppercase hex digits
     * @return the input split into two-digit groups, each prefixed "\x"
     */
    private String toEscapedHex(String hex) {
        // Each byte expands to 4 output chars ("\xHH"); presize accordingly.
        StringBuilder sb = new StringBuilder(hex.length() * 2);
        for (int i = 0; i < hex.length(); i += 2) {
            sb.append("\\x").append(hex, i, i + 2);
        }
        return sb.toString();
    }

}