package qdu.edu.com.fushanf4.service.count.usertype;

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Partitioner;

import java.util.HashMap;
import java.util.Map;

/**
 * Routes map output records to reducers by user-type label: each label in
 * {@link #usersType} maps to the partition equal to its array index, and any
 * unrecognized label falls through to partition 0.
 */
public class ClassificationOfProductDescriptionsByUserTypePartitioner extends Partitioner<Text, Text> {
    // Index in this array is the desired partition number; index 0 (empty
    // string) is the fallback partition for unknown labels.
    private final static String[] usersType = new String[]{""
            ,"买鞋控","烹饪达人","吃货","时尚靓妹","家庭主妇","美丽教主","数码达人","运动一族","白富美","爱包人"
            ,"职场办公","收纳达人","速食客","养生专家","户外一族","阅读者","学霸","休闲大咖","有型潮男","高富帅"
            ,"二手买家","健美一族","爱听音乐","花卉一族","动漫迷","游戏人生","情趣一族","酒品人生","追风骑士","收藏家"
            ,"绘画家","摄影一族","旅行者","舞林人士","乐器迷","网络一族","商家会","书法家","电影派","果粉"
            ,"理财专家"
    };

    // O(1) label -> partition-index lookup, built once per JVM instead of
    // linearly scanning usersType for every record in the shuffle hot path.
    private final static Map<String, Integer> typeToPartition = new HashMap<>();
    static {
        for (int j = 1; j < usersType.length; j++) {
            typeToPartition.put(usersType[j], j);
        }
    }

    /**
     * Returns the partition for the given key.
     *
     * @param text  the user-type label emitted by the mapper
     * @param text2 the record value (unused)
     * @param i     the number of partitions (reduce tasks) for the job
     * @return a partition index in {@code [0, i)}; 0 for unknown labels
     */
    @Override
    public int getPartition(Text text, Text text2, int i) {
        // Text.toString() decodes only the valid bytes (0..getLength()) of the
        // Text's backing buffer. The previous Bytes.toString(text.getBytes())
        // decoded the whole (possibly larger, reused) buffer, producing stray
        // trailing '\u0000' characters that broke equals() and forced a
        // replaceAll("\u0000", "") workaround.
        Integer partition = typeToPartition.get(text.toString());
        if (partition == null) {
            return 0; // unknown user type -> fallback partition
        }
        // Partitioner contract requires a result in [0, numPartitions); the
        // modulo guards against jobs configured with fewer reducers than
        // there are user types (the old code returned j unclamped, which
        // would crash the shuffle with "Illegal partition").
        return partition % i;
    }
}
