package hbase.mapReduce;

import hbase.common.MyPut;
import hbase.operation.DataOperation;
import hbase.operation.TableOperation;
import hbase.tools.Constants;
import hbase.tools.HbaseUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

/**
 * MapReduce word-count over HBase: reads comma-separated words from the
 * source table's {@code data:words} column, counts each word, and writes
 * one row per word ({@code result:word}, {@code result:count}) into the
 * destination table.
 */
public class HBaseWordCountApp {

    private static Configuration conf = null;
    private static Connection conn = null;
    private static Admin admin = null;
    // Fixed typo: was "ramdom".
    private static Random random = new Random();

    static {
        try {
            conf = HBaseConfiguration.create();
            conf.set(Constants.ZK_QUORUM, Constants.ZK_QUORUM_NODES);
            conf.set(Constants.ZK_CLIENT_PORT, Constants.ZK_PORT);

            conn = ConnectionFactory.createConnection(conf);
            admin = conn.getAdmin();
        } catch (IOException e) {
            // Preserve the cause and add context for the failure.
            throw new RuntimeException("Failed to initialize HBase connection", e);
        }
    }

    /**
     * Mapper: reads the {@code data:words} column of each source row and
     * emits {@code (word, 1)} for every comma-separated word.
     */
    public static class HWCMapper extends TableMapper<Text, IntWritable> {

        private final Text outKey = new Text();
        private final IntWritable outValue = new IntWritable(1);

        @Override
        protected void map(ImmutableBytesWritable key, Result value,
                           Mapper<ImmutableBytesWritable, Result, Text, IntWritable>.Context context)
                throws IOException, InterruptedException {
            // Fetch the raw cell value of data:words for this row.
            byte[] raw = value.getValue(Bytes.toBytes(Constants.SRC_COLUMNFAMILY),
                    Bytes.toBytes(Constants.SRC_COLUMN));
            // Guard against rows missing the column (original code would NPE here).
            if (raw == null) {
                return;
            }
            // Bytes.toString decodes UTF-8; new String(byte[]) used the platform charset.
            String line = Bytes.toString(raw);

            for (String word : line.split(",")) {
                // Per-word counter, used later to cross-check the job output.
                context.getCounter("wordCount", word).increment(1);
                outKey.set(word);
                context.write(outKey, outValue);
            }
        }
    }

    /**
     * Reducer: sums the counts of each word and writes the word and its total
     * into the destination table under a sequential, zero-padded row key.
     *
     * NOTE(review): the per-instance {@code count} row-key scheme only yields
     * unique, ordered row keys when the job runs with a single reducer —
     * confirm the reducer count configured by the job setup.
     */
    public static class HWCReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {

        private int count = 0;

        private final ImmutableBytesWritable outKey = new ImmutableBytesWritable();

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values,
                              Reducer<Text, IntWritable, ImmutableBytesWritable, Mutation>.Context context)
                throws IOException, InterruptedException {
            // Total occurrences of the current word.
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }

            // Use a running counter as the destination row key (zero-padded).
            String rowKey = String.format("%02d", ++count);
            outKey.set(Bytes.toBytes(rowKey));

            MyPut put = new MyPut(rowKey);
            // result:word = the word itself, result:count = its total.
            put.addColumn(Constants.DEST_COLUMNFAMILY, Constants.DEST_COLUMN_WORD, key.toString());
            put.addColumn(Constants.DEST_COLUMNFAMILY, Constants.DEST_COLUMN_COUNT, sum + "");

            context.write(outKey, put);
        }
    }

    /**
     * Ensures the source table exists and holds test data (creating the table
     * and/or 100 test rows when missing), and ensures the destination table
     * exists and is empty before the job runs.
     */
    public static void initTableAndData() {
        Table table = null;
        try {
            // --- Source table: create and seed, or seed only, as needed. ---
            if (HbaseUtils.isTableNotExists(admin, Constants.SRC_TABLENAME)) {
                // Create the SRC table with the "data" column family, then seed 100 rows.
                TableOperation.createTable(Constants.SRC_TABLENAME, false, Constants.SRC_COLUMNFAMILY);
                DataOperation.addOrModifyMoreData(Constants.SRC_TABLENAME, false, generateData());
            } else {
                // Only open a handle once the table is known to exist
                // (the original called getTable before the existence check).
                table = conn.getTable(TableName.valueOf(Constants.SRC_TABLENAME));
                if (HbaseUtils.isDataNotExists(table)) {
                    // Table exists but is empty: seed the test data.
                    DataOperation.addOrModifyMoreData(Constants.SRC_TABLENAME, false, generateData());
                } else {
                    System.out.println("表和数据都存在，无需操作");
                }
                // Close before reusing the variable (original leaked this handle).
                table.close();
                table = null;
            }

            // --- Destination table: create if missing, truncate if populated. ---
            if (HbaseUtils.isTableNotExists(admin, Constants.DEST_TABLENAME)) {
                // Create the DEST table with the "result" column family.
                TableOperation.createTable(Constants.DEST_TABLENAME, false, Constants.DEST_COLUMNFAMILY);
            } else {
                table = conn.getTable(TableName.valueOf(Constants.DEST_TABLENAME));
                if (HbaseUtils.isDataExists(table)) {
                    // Stale output would mix with the new run: clear data, keep schema.
                    DataOperation.deleteTableData(Constants.DEST_TABLENAME, false);
                } else {
                    System.out.println("表存在，且没有数据，无需操作");
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            HbaseUtils.closed(conn, admin, table);
        }
    }

    /**
     * Builds 100 test rows for the source table; each row's {@code data:words}
     * value is 5-10 random lowercase letters joined by commas.
     *
     * @return the list of Puts to insert into the source table
     */
    private static List<Put> generateData() {

        List<Put> putList = new ArrayList<>(100);

        for (int i = 1; i <= 100; i++) {
            MyPut put = new MyPut(String.format("%03d", i));
            // Each row gets 5 to 10 single-letter "words".
            int wordCount = random.nextInt(6) + 5;
            StringBuilder sb = new StringBuilder();
            for (int j = 0; j < wordCount; j++) {
                if (j > 0) {
                    sb.append(',');
                }
                sb.append((char) (random.nextInt(26) + 'a'));
            }

            put.addColumn(Constants.SRC_COLUMNFAMILY, Constants.SRC_COLUMN, sb.toString());
            putList.add(put);
        }
        return putList;
    }

    public static void main(String[] args) throws Exception {
        // Prepare source/destination tables and test data first.
        initTableAndData();

        Job job = Job.getInstance(conf, "hbase-word-count");
        // Required so the job jar can be located when submitted to a cluster.
        job.setJarByClass(HBaseWordCountApp.class);

        TableMapReduceUtil.initTableMapperJob(
                Constants.SRC_TABLENAME,
                new Scan(),
                HWCMapper.class,
                Text.class,
                IntWritable.class,
                job
        );

        TableMapReduceUtil.initTableReducerJob(
                Constants.DEST_TABLENAME,
                HWCReducer.class,
                job
        );

        // Propagate success/failure to the caller (original ignored the result).
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
