package com.briup.searchengine.handle;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.util.TreeSet;

/**
 * MapReduce job that builds an inverted index: scans the "join_result" HBase
 * table and writes rows keyed by keyword to "invertindex_result", with the
 * pages for each keyword ordered by rank.
 *
 * @author adam
 * @date 2022/6/13
 */
public class Step7_InvertIndex extends Configured implements Tool {

    /** Column family shared by the input ("join_result") and output ("invertindex_result") tables. */
    private static final byte[] FAMILY = Bytes.toBytes("page");

    /** HBase row keys are limited to Short.MAX_VALUE (32767) bytes. */
    private static final int MAX_ROW_KEY_LENGTH = Short.MAX_VALUE;

    /**
     * Emits one (keyword, WebPageItem) pair per input row so the reducer can
     * group all pages that contain the same keyword.
     */
    public static class InvertIndexMapper extends TableMapper<ImmutableBytesWritable, WebPageItem> {
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context)
                throws IOException, InterruptedException {
            // copyBytes() honours the writable's offset/length; key.get() may
            // expose a larger backing array containing stale bytes.
            String rowKey = Bytes.toString(key.copyBytes());
            byte[] keywordBytes = value.getValue(FAMILY, Bytes.toBytes("key"));
            byte[] rankBytes = value.getValue(FAMILY, Bytes.toBytes("rank"));
            byte[] titleBytes = value.getValue(FAMILY, Bytes.toBytes("title"));
            byte[] contentBytes = value.getValue(FAMILY, Bytes.toBytes("content"));

            // Result.getValue returns null for a missing column; skip malformed
            // rows instead of killing the task with an NPE.
            if (keywordBytes == null || rankBytes == null || titleBytes == null || contentBytes == null) {
                return;
            }
            double rank = Bytes.toDouble(rankBytes);

            // The keyword becomes the output row key; honour HBase's row-key size limit.
            if (keywordBytes.length > MAX_ROW_KEY_LENGTH) {
                keywordBytes = Bytes.copy(keywordBytes, 0, MAX_ROW_KEY_LENGTH);
            }
            WebPageItem item = new WebPageItem(rowKey, Bytes.toString(keywordBytes), rank,
                    Bytes.toString(titleBytes), Bytes.toString(contentBytes));
            // Keyword as the shuffle key; the payload carries rank, page key, title, content.
            context.write(new ImmutableBytesWritable(keywordBytes), item);
        }
    }

    /**
     * Sorts all pages of one keyword by rank and writes a single output row:
     * row key = keyword, one column per page (column qualifier = page key,
     * cell value = "rank||title||content").
     */
    public static class InvertIndexReducer extends TableReducer<ImmutableBytesWritable, WebPageItem, NullWritable> {
        @Override
        protected void reduce(ImmutableBytesWritable key, Iterable<WebPageItem> values, Context context)
                throws IOException, InterruptedException {
            byte[] rowKey = key.copyBytes();
            if (rowKey.length == 0) {
                return;
            }
            // Sort by rank; tie-break on the page key so pages with equal ranks
            // are all kept. The original comparator,
            // (int)(a.getRank()-b.getRank())*10000, truncated any rank
            // difference smaller than 1.0 to 0 before multiplying, so distinct
            // pages compared equal and were silently dropped by the TreeSet.
            // (Tie-breaking on getKeyWorld() was also useless here: every item
            // in one reduce group shares the same keyword.)
            TreeSet<WebPageItem> sorted = new TreeSet<>((a, b) -> {
                int byRank = Double.compare(a.getRank(), b.getRank());
                return byRank != 0 ? byRank : a.getKey().compareTo(b.getKey());
            });
            for (WebPageItem value : values) {
                // Hadoop reuses the value instance across iterations of the
                // Iterable, so store a defensive copy — otherwise every entry
                // in the set ends up referencing the same mutated object.
                // NOTE(review): assumes WebPageItem's 5-arg constructor is
                // (key, keyWorld, rank, title, content) as used in the mapper
                // and that the getters mirror it — confirm against WebPageItem.
                sorted.add(new WebPageItem(value.getKey(), value.getKeyWorld(), value.getRank(),
                        value.getTitle(), value.getContent()));
            }
            Put put = new Put(rowKey);
            for (WebPageItem item : sorted) {
                // Cell layout: rank||title||content, qualified by the page key.
                String cellValue = item.getRank() + "||" + item.getTitle() + "||" + item.getContent();
                put.addColumn(FAMILY, Bytes.toBytes(item.getKey()), Bytes.toBytes(cellValue));
            }
            context.write(NullWritable.get(), put);
        }
    }

    /**
     * Configures and submits the job: scans "join_result" through the mapper
     * and writes the inverted index to "invertindex_result".
     *
     * @param args command-line arguments (unused)
     * @return 0 on success, 1 on failure
     * @throws Exception if job setup or submission fails
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        // conf.set("hbase.zookeeper.quorum", "hadoop01:2181");
        Job job = Job.getInstance(conf, "InvertIndex");
        job.setJarByClass(this.getClass());
        TableMapReduceUtil.initTableMapperJob("join_result", new Scan(), InvertIndexMapper.class,
                ImmutableBytesWritable.class, WebPageItem.class, job);
        TableMapReduceUtil.initTableReducerJob("invertindex_result", InvertIndexReducer.class, job);
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        // Propagate the job's status as the process exit code so schedulers can
        // detect failure (the original discarded ToolRunner's return value).
        System.exit(ToolRunner.run(new Step7_InvertIndex(), args));
    }

}
