package cn.doitedu.hbase.hbasemr;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;

/**
 * @date: 2019/7/14
 * @site: www.doitedu.cn
 * @author: hunter.d 涛哥
 * @qq: 657270652
 * @description:  统计用户评分最高的n条评分记录
 * 从 hbase的表：rating读
 * 将结果写入hbase的表：topn
 */
public class Hbase2Hbase {


    /**
     * Mapper reading from the HBase "rating" table. Extending HBase's
     * {@link TableMapper} fixes KEYIN/VALUEIN to ImmutableBytesWritable/Result.
     * Emits (uid, RateBean) per rating row; rows that fail to parse are counted
     * under the "zangshuju"/"int_erro" counter (dirty-data counter) and skipped
     * instead of failing the job.
     */
    public static class M extends TableMapper<Text,RateBean> {


        // Column family and qualifier byte arrays, built once with an explicit
        // charset (the original per-record getBytes() used the platform-default
        // charset, which varies across JVMs).
        final byte[] F = "f".getBytes(StandardCharsets.UTF_8);
        final byte[] Q_A = "a".getBytes(StandardCharsets.UTF_8);
        final byte[] Q_G = "g".getBytes(StandardCharsets.UTF_8);
        final byte[] Q_M = "m".getBytes(StandardCharsets.UTF_8);
        final byte[] Q_R = "r".getBytes(StandardCharsets.UTF_8);
        final byte[] Q_S = "s".getBytes(StandardCharsets.UTF_8);

        // Reused output objects — standard MapReduce object-reuse pattern to
        // avoid allocating per record.
        RateBean b = new RateBean();
        Text k = new Text();

        /**
         * Parses one rating row and emits (uid, RateBean).
         *
         * @param key     the row key; layout assumed: first 6 chars = movieId,
         *                last 6 chars = uid — TODO confirm against the loader
         *                that populated the "rating" table
         * @param value   the HBase row, columns read from family "f"
         * @param context MapReduce context used for output and counters
         */
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {
            // Counter for malformed ("dirty") input rows.
            Counter counter = context.getCounter("zangshuju", "int_erro");

            try {
                byte[] a = value.getValue(F, Q_A); // age
                byte[] g = value.getValue(F, Q_G); // gender
                byte[] m = value.getValue(F, Q_M); // movie name
                byte[] r = value.getValue(F, Q_R); // rate
                byte[] s = value.getValue(F, Q_S); // style

                // Decode the rowkey with an explicit charset and split it into
                // its two fixed-width components.
                String rk = new String(key.copyBytes(), StandardCharsets.UTF_8);
                String movieId = rk.substring(0, 6);
                String uid = rk.substring(rk.length() - 6);

                k.set(uid);
                b.set(uid,
                        Integer.parseInt(new String(a, StandardCharsets.UTF_8)),
                        new String(g, StandardCharsets.UTF_8),
                        movieId,
                        new String(m, StandardCharsets.UTF_8),
                        new String(s, StandardCharsets.UTF_8),
                        Integer.parseInt(new String(r, StandardCharsets.UTF_8)));

                context.write(k, b);
            }catch (Exception e){
                // Deliberate best-effort handling: count the dirty row and
                // continue rather than aborting the whole job.
                counter.increment(1);
            }
        }
    }

    /**
     * Reducer: receives all rating beans for one uid, keeps the top-N by rate
     * (descending) and writes them into the HBase "topn" table.
     * N is read from the job configuration key {@code "topn"} (default 5),
     * so callers can tune it without recompiling; previous hard-coded behavior
     * (5) is preserved when the key is absent.
     */
    public static class R extends TableReducer<Text,RateBean,ImmutableBytesWritable> {

        /** Configuration key for the number of top ratings kept per user. */
        public static final String TOPN_CONF_KEY = "topn";
        private static final int DEFAULT_TOPN = 5;

        @Override
        protected void reduce(Text key, Iterable<RateBean> values, Context context) throws IOException, InterruptedException {

            ArrayList<RateBean> lst = new ArrayList<>();

            // Buffer all ratings for this user. Hadoop reuses the value object
            // across iterations of the Iterable, so each bean must be
            // deep-copied before being stored.
            for (RateBean b : values) {
                RateBean newBean = new RateBean();
                newBean.set(b.getUid(), b.getAge(), b.getGender(), b.getMovieId(),
                        b.getMovieName(), b.getStyle(), b.getRate());
                lst.add(newBean);
            }

            // Sort by rate, highest first. Integer.compare-based comparison
            // (via comparingInt) avoids the overflow bug of the subtraction
            // idiom (o2.getRate() - o1.getRate()).
            lst.sort(Comparator.comparingInt(RateBean::getRate).reversed());

            // Emit the top-N rating records for this user.
            int topn = context.getConfiguration().getInt(TOPN_CONF_KEY, DEFAULT_TOPN);
            for (int i = 0; i < Math.min(topn, lst.size()); i++) {

                RateBean rateBean = lst.get(i);

                // Rowkey = uid + "_" + movieId; value = the whole bean's
                // toString() stored under column f:c. Explicit charset so the
                // stored bytes do not depend on the JVM's platform default.
                Put put = new Put((rateBean.getUid() + "_" + rateBean.getMovieId())
                        .getBytes(StandardCharsets.UTF_8));
                put.addColumn("f".getBytes(StandardCharsets.UTF_8),
                        "c".getBytes(StandardCharsets.UTF_8),
                        rateBean.toString().getBytes(StandardCharsets.UTF_8));

                // TableOutputFormat ignores the output key, so null is used here.
                context.write(null, put);
            }

        }
    }

    /**
     * Driver: configures and submits the rating -> topn HBase MapReduce job.
     * Reads from table "rating" via TableInputFormat, writes Puts to table
     * "topn" with a single reducer.
     */
    public static void main(String[] args) throws Exception {

        Configuration conf = new Configuration();
        conf.set("hbase.zookeeper.quorum","spark01:2181,spark02:2181,spark03:2181");
        conf.set("mapreduce.map.tasks","5");

        Job job = Job.getInstance(conf);

        // Bug fix: the jar class must be THIS driver class, not UserRateTopn —
        // otherwise running from a packaged jar localizes the wrong jar.
        job.setJarByClass(Hbase2Hbase.class);


        // Wire up this file's own mapper (M) — the original referenced
        // UserRateTopn.M, a mapper from a different class — together with
        // HBase's TableInputFormat over the "rating" table.
        Scan scan = new Scan();
        TableMapReduceUtil.initTableMapperJob("rating", scan, M.class, Text.class, RateBean.class, job);


        // Single reducer writing into the "topn" table.
        job.setNumReduceTasks(1);
        TableMapReduceUtil.initTableReducerJob("topn", R.class, job);

        // Propagate the job outcome as the process exit code instead of
        // discarding it.
        System.exit(job.waitForCompletion(true) ? 0 : 1);

    }


}
