package com.briup.searchengine.handle;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * @author adam
 * @date 2022/6/13
 * 连接两张表  clean_webpage  和 rank_result
 */
public class Step5_JoinCleanAndRank extends Configured implements Tool {

    /** Column family used by both source tables and the output table. */
    private static final byte[] FAMILY = "page".getBytes(StandardCharsets.UTF_8);

    /**
     * Map side of a reduce-side join over two HBase tables.
     * Each scanned row carries a flag column {@code page:i}: the value "a"
     * marks rows from {@code clean_webpage}; anything else is treated as a
     * row from {@code rank_result}. The mapper emits, per row key, a
     * MapWritable holding only the columns relevant to that source.
     */
    public static class JoinCleanAndRankMapper extends TableMapper<ImmutableBytesWritable, MapWritable> {
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {
            // Read the flag column to determine which table this row came from.
            String flag = new String(HbaseUtils.getValueByFamilyAndCol(value, "page", "i"), StandardCharsets.UTF_8);
            MapWritable columns = new MapWritable();
            if ("a".equals(flag)) {
                // Row from clean_webpage: forward key, title and content.
                columns.put(new Text("key"),
                        new BytesWritable(HbaseUtils.getValueByFamilyAndCol(value, "page", "key")));
                columns.put(new Text("title"),
                        new BytesWritable(HbaseUtils.getValueByFamilyAndCol(value, "page", "t")));
                columns.put(new Text("content"),
                        new BytesWritable(HbaseUtils.getValueByFamilyAndCol(value, "page", "cnt")));
            } else {
                // Row from rank_result: forward the page-rank value only.
                columns.put(new Text("rank"),
                        new BytesWritable(HbaseUtils.getValueByFamilyAndCol(value, "page", "rank")));
            }
            context.write(key, columns);
        }
    }

    /**
     * Reduce side of the join: merges every column map emitted for one row
     * key into a single {@link Put} and writes it exactly once.
     * NOTE(review): "Pank" is a typo for "Rank"; the name is kept because the
     * class is public and may be referenced outside this file.
     */
    public static class JoinCleanAndPankReducer extends TableReducer<ImmutableBytesWritable, MapWritable, NullWritable> {
        @Override
        protected void reduce(ImmutableBytesWritable key, Iterable<MapWritable> values, Context context) throws IOException, InterruptedException {
            Put put = new Put(key.get(), key.getOffset(), key.getLength());
            for (MapWritable columns : values) {
                for (Map.Entry<Writable, Writable> entry : columns.entrySet()) {
                    Text column = (Text) entry.getKey();
                    BytesWritable cell = (BytesWritable) entry.getValue();
                    // copyBytes(): getBytes() returns the backing array, which
                    // after shuffle deserialization may be padded beyond the
                    // logical length — raw getBytes() would store garbage bytes.
                    put.addColumn(FAMILY, column.toString().getBytes(StandardCharsets.UTF_8), cell.copyBytes());
                }
            }
            // BUG FIX: previously written inside the values loop, emitting the
            // accumulating Put once per source map (partial duplicates).
            // Write the fully merged row exactly once per key.
            context.write(NullWritable.get(), put);
        }
    }

    /**
     * Builds and runs the join job: one multi-scan mapper over both source
     * tables, reducing into the {@code last_result} table.
     *
     * @return 0 on job success, 1 on failure
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        //conf.set("hbase.zookeeper.quorum", "hadoop01:2181");
        Job job = Job.getInstance(conf, "JoinCleanAndRank");
        job.setJarByClass(this.getClass());
        // One Scan per source table; the table name attribute tells the
        // multi-table input format which table each scan targets.
        List<Scan> scans = new ArrayList<>();
        Scan cleanScan = new Scan();
        cleanScan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, "clean_webpage".getBytes(StandardCharsets.UTF_8));
        scans.add(cleanScan);
        Scan rankScan = new Scan();
        rankScan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, "rank_result".getBytes(StandardCharsets.UTF_8));
        scans.add(rankScan);
        TableMapReduceUtil.initTableMapperJob(scans, JoinCleanAndRankMapper.class, ImmutableBytesWritable.class, MapWritable.class, job);
        TableMapReduceUtil.initTableReducerJob("last_result", JoinCleanAndPankReducer.class, job);
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        // BUG FIX: propagate the tool's exit code — the original discarded it,
        // so failed jobs still exited 0.
        System.exit(ToolRunner.run(new Step5_JoinCleanAndRank(), args));
    }
}
