import org.apache.commons.codec.digest.DigestUtils;
import org.apache.hadoop.hive.ql.exec.UDF;

import java.util.List;

/**
 * Hive UDF that computes an HBase salt prefix for a row key, used to spread
 * writes evenly across pre-split regions.
 *
 * @author jiashen
 * @date 2021/10/16 23:11
 */
public class AddSalt extends UDF {

    /**
     * Hive UDF entry point: computes the salt prefix for the given row key.
     *
     * @param key       row key to salt; may be {@code null} (Hive passes nulls through)
     * @param regionNum number of pre-split regions; must be positive
     * @return zero-padded salt followed by {@code "|"}, or {@code null} when either
     *         argument is {@code null} or {@code regionNum} is not positive
     */
    public String evaluate(String key, Integer regionNum) {
        // Hive convention: null input yields null output instead of an NPE.
        // A non-positive regionNum would otherwise cause ArithmeticException (% 0)
        // or a nonsensical salt, so it is treated the same way.
        if (key == null || regionNum == null || regionNum <= 0) {
            return null;
        }
        return addSalt(key, regionNum);
    }

    /**
     * HBase salting rule: MD5 the key, take the String hash of the hex digest,
     * mod by the region count, zero-pad, and append {@code "|"}.
     * <p>
     * Create the pre-split table like (compression optional; the splits file must
     * be generated separately):
     * <pre>
     * create 'table', {NAME =&gt; 'info', COMPRESSION =&gt; 'ZSTD', DATA_BLOCK_ENCODING =&gt; 'DIFF'},
     *   SPLITS_FILE =&gt; '/home/hadoop/alihbase-2.0.18/splits/splits.txt'
     * </pre>
     * NOTE(review): the pad width is the digit count of {@code regionNum} itself,
     * so e.g. regionNum = 10 produces widths "00".."09" (one digit wider than the
     * max salt 9 needs). This is kept as-is because existing row keys already use
     * this exact format — changing it would orphan stored data.
     *
     * @param key       row key to salt; must not be {@code null}
     * @param regionNum number of pre-split regions; must be positive
     * @return zero-padded salt in [0, regionNum) followed by {@code "|"}
     */
    public static String addSalt(String key, Integer regionNum) {
        int width = regionNum.toString().length();
        // hashCode(...) may be negative; Math.abs is safe here because the
        // remainder's magnitude is strictly less than regionNum.
        int salt = Math.abs(hashCode(DigestUtils.md5Hex(key)) % regionNum);
        return String.format("%0" + width + "d", salt) + "|";
    }

    /**
     * Returns the standard Java string hash:
     * {@code s[0]*31^(n-1) + s[1]*31^(n-2) + ... + s[n-1]}.
     * <p>
     * Delegates to {@link String#hashCode()}, whose JLS-specified algorithm is
     * exactly the formula above and which also returns 0 for the empty string,
     * matching the original hand-rolled loop. Null still maps to 0.
     *
     * @param value string to hash; may be {@code null}
     * @return the string's hash code, or 0 when {@code value} is {@code null}
     */
    public static int hashCode(String value) {
        return value == null ? 0 : value.hashCode();
    }
}
