package com.doitedu.doit30.hbase.load;

import com.alibaba.fastjson.JSON;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

import java.io.IOException;

/**
 * MapReduce job that bulk-loads JSON-formatted movie rating data into an HBase table.
 *
 * Pipeline:
 *   1. Read the input files line by line (one JSON record per line).
 *   2. Parse each line into a {@code MovieBean} and derive a sortable row key
 *      from the zero-padded movie id and timestamp.
 *   3. Write each record as a {@link Put} into the target HBase table via a
 *      {@link TableReducer}.
 *
 * Usage: Data2Hbase [inputPath] [tableName] [zkQuorum]
 * (all arguments optional; defaults preserve the original hard-coded values)
 */
public class Data2Hbase {

    /**
     * Mapper: parses one JSON line into a {@code MovieBean}, builds the row key
     * {@code <movie zero-padded to 6>_<timestamp zero-padded to 10>} and emits it.
     * Unparseable or incomplete lines are skipped (best-effort load) but counted
     * in job counters so data quality problems remain visible.
     */
    static class HbaseMapper extends Mapper<LongWritable, Text, Text, MovieBean> {
        // Reused output key object (standard Hadoop pattern to avoid per-record allocation).
        private final Text outKey = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            try {
                MovieBean movieBean = JSON.parseObject(value.toString(), MovieBean.class);
                // Guard against blank lines / records missing the fields the row key needs.
                if (movieBean == null
                        || StringUtils.isBlank(movieBean.getMovie())
                        || StringUtils.isBlank(movieBean.getTimeStamp())) {
                    context.getCounter("data2hbase", "skipped_records").increment(1L);
                    return;
                }
                // Row key: fixed-width movie id + "_" + fixed-width timestamp, so rows
                // sort by movie first, then chronologically within a movie.
                String rowKey = StringUtils.leftPad(movieBean.getMovie(), 6, '0')
                        + "_"
                        + StringUtils.leftPad(movieBean.getTimeStamp(), 10, '0');
                outKey.set(rowKey);
                context.write(outKey, movieBean);
            } catch (Exception e) {
                // Best-effort load: do not fail the whole job on one malformed line,
                // but surface the problem through a counter instead of printStackTrace().
                context.getCounter("data2hbase", "bad_records").increment(1L);
            }
        }
    }

    /**
     * Reducer: converts each grouped record into an HBase {@link Put} on the
     * family {@code cf} and writes it to the target table.
     */
    static class HbaseReducer extends TableReducer<Text, MovieBean, ImmutableBytesWritable> {
        // Column family shared by all cells written by this job.
        private static final byte[] CF = Bytes.toBytes("cf");

        @Override
        protected void reduce(Text key, Iterable<MovieBean> values, Context context)
                throws IOException, InterruptedException {
            byte[] rowKey = Bytes.toBytes(key.toString());
            // NOTE(review): only the first record per row key is written. Records sharing
            // (movie, timestamp) would target the same row and overwrite each other's
            // cells anyway; if all of them matter, add uid to the row key upstream.
            MovieBean movieBean = values.iterator().next();

            Put put = new Put(rowKey);
            put.addColumn(CF, Bytes.toBytes("movie"), Bytes.toBytes(movieBean.getMovie()));
            put.addColumn(CF, Bytes.toBytes("rate"), Bytes.toBytes(movieBean.getRate()));
            put.addColumn(CF, Bytes.toBytes("timeStamp"), Bytes.toBytes(movieBean.getTimeStamp()));
            put.addColumn(CF, Bytes.toBytes("uid"), Bytes.toBytes(movieBean.getUid()));

            // Pass the row key explicitly (instead of null) — the idiomatic form for
            // TableOutputFormat and required by some output-format configurations.
            context.write(new ImmutableBytesWritable(rowKey), put);
        }
    }

    /**
     * Configures and submits the job.
     *
     * @param args optional: [0] input path, [1] HBase table name, [2] ZooKeeper quorum.
     *             Omitted arguments fall back to the original hard-coded defaults,
     *             keeping existing invocations working unchanged.
     */
    public static void main(String[] args) throws Exception {
        String inputPath = args.length > 0 ? args[0] : "E:\\mrdata\\movie\\input";
        String tableName = args.length > 1 ? args[1] : "tb_movie";
        String zkQuorum  = args.length > 2 ? args[2] : "linux01:2181,linux02:2181,linux03:2181";

        Configuration conf = HBaseConfiguration.create();
        // Tell the HBase client where to find the cluster.
        conf.set("hbase.zookeeper.quorum", zkQuorum);

        Job job = Job.getInstance(conf, "data2hbase-movie-load");
        // Required when submitting to a cluster: lets Hadoop locate the jar
        // containing the mapper/reducer classes. Missing in the original.
        job.setJarByClass(Data2Hbase.class);

        job.setMapperClass(HbaseMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(MovieBean.class);

        FileInputFormat.setInputPaths(job, inputPath);

        // Wires in the reducer and configures TableOutputFormat for the target table.
        TableMapReduceUtil.initTableReducerJob(tableName, HbaseReducer.class, job);

        // Propagate job success/failure through the process exit code
        // (the original ignored the boolean result and always exited 0).
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
