package com.briup.searchengine.handle;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.util.Map;

/**
 * @author adam
 * @date 2022/6/10
 * Data-cleaning step, implemented as a MapReduce job:
 * <p>
 * 1. A row counts as successfully fetched when `f:st` equals 2.
 * <p>
 * 2. Keep the number of columns in family `il`, the number of columns in
 *    family `ol`, the columns `s:s`, `p:t`, `p:c`, and the full contents of
 *    families `il` and `ol`.
 * 3. Assemble the kept fields and write them to the new table `clean_webpage`.
 * <p>
 * map: read the raw crawled rows and clean them.
 * reducer: persist the cleaned rows.
 */
public class Step1_CleanData extends Configured implements Tool {


    public static class CleanDataMapper extends TableMapper<ImmutableBytesWritable, MapWritable> {

        /**
         * Reads one crawled row, keeps it only when the fetch succeeded
         * ({@code f:st} == 2) and a title is present, then packs the retained
         * fields into a {@link MapWritable} keyed by logical field name and
         * emits it under the original row key.
         */
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {
            // Fetch status lives in f:st; the value 2 marks a successful crawl.
            int status = Bytes.toInt(HbaseUtils.getValueByFamilyAndCol(value, "f", "st"));
            if (status != 2) {
                // Drop rows whose fetch or parse failed.
                return;
            }
            // Page title (p:t) is mandatory; rows without one are useless downstream.
            byte[] title = HbaseUtils.getValueByFamilyAndCol(value, "p", "t");
            if (title == HbaseUtils.NONE) {
                return;
            }
            MapWritable fields = new MapWritable();
            // Link counts: inbound (il) and outbound (ol) column families.
            fields.put(fieldKey("ilNum"), new BytesWritable(Bytes.toBytes(HbaseUtils.countByFamily(value, "il"))));
            fields.put(fieldKey("olNum"), new BytesWritable(Bytes.toBytes(HbaseUtils.countByFamily(value, "ol"))));
            // Raw page body (p:c).
            fields.put(fieldKey("pageContext"), new BytesWritable(HbaseUtils.getValueByFamilyAndCol(value, "p", "c")));
            fields.put(fieldKey("title"), new BytesWritable(title));
            // Nutch score for this URL (s:s).
            fields.put(fieldKey("score"), new BytesWritable(HbaseUtils.getValueByFamilyAndCol(value, "s", "s")));
            // Complete column maps of the outbound/inbound link families.
            fields.put(fieldKey("olMap"), HbaseUtils.getMapByFamily(value, "ol"));
            fields.put(fieldKey("ilMap"), HbaseUtils.getMapByFamily(value, "il"));
            // Base URL of the page (f:bas).
            fields.put(fieldKey("baseUrl"), new BytesWritable(HbaseUtils.getValueByFamilyAndCol(value, "f", "bas")));
            context.write(key, fields);
        }

        /** Wraps a logical field name as the BytesWritable key used in the output map. */
        private static BytesWritable fieldKey(String name) {
            return new BytesWritable(name.getBytes());
        }
    }

    public static class CleanDataReducer extends TableReducer<ImmutableBytesWritable, MapWritable, NullWritable> {
        @Override
        protected void reduce(ImmutableBytesWritable key, Iterable<MapWritable> values, Context context) throws IOException, InterruptedException {
            //获取清洗之后的数据
            MapWritable map = values.iterator().next();
            String keyStr = new String(key.get(), key.getOffset(), key.getLength());
            keyStr = URLUtils.showUrl(keyStr);
            Put put = new Put(keyStr.getBytes());
            put.addColumn("page".getBytes(), "oln".getBytes(), ((BytesWritable) (map.get(new BytesWritable("olNum".getBytes())))).getBytes());
            put.addColumn("page".getBytes(), "iln".getBytes(), ((BytesWritable) (map.get(new BytesWritable("ilNum".getBytes())))).getBytes());
            put.addColumn("page".getBytes(), "t".getBytes(), ((BytesWritable) (map.get(new BytesWritable("title".getBytes())))).getBytes());
            put.addColumn("page".getBytes(), "s".getBytes(), ((BytesWritable) (map.get(new BytesWritable("score".getBytes())))).getBytes());
            put.addColumn("page".getBytes(), "cnt".getBytes(), ((BytesWritable) (map.get(new BytesWritable("pageContext".getBytes())))).getBytes());
            put.addColumn("page".getBytes(), "baseUrl".getBytes(), ((BytesWritable) (map.get(new BytesWritable("baseUrl".getBytes())))).getBytes());
            MapWritable olMap = (MapWritable) map.get(new BytesWritable("olMap".getBytes()));
            for (Map.Entry<Writable, Writable> entry : olMap.entrySet()) {
                put.addColumn("ol".getBytes(), ((BytesWritable) (entry.getKey())).getBytes(), ((BytesWritable) entry.getValue()).getBytes());
            }

            MapWritable ilMap = (MapWritable) map.get(new BytesWritable("ilMap".getBytes()));
            for (Map.Entry<Writable, Writable> entry : ilMap.entrySet()) {
                put.addColumn("il".getBytes(), ((BytesWritable) (entry.getKey())).getBytes(), ((BytesWritable) entry.getValue()).getBytes());
            }
            context.write(NullWritable.get(), put);

        }
    }

    @Override
    public int run(String[] strings) throws Exception {
        Configuration conf = getConf();

        String inTable = conf.get("in");
        String outTable = conf.get("out");

        inTable = "briup_webpage";
        outTable = "clean_webpage";

        //conf.set("hbase.zookeeper.quorum", "se:2181");
        Job job = Job.getInstance(conf, "cleanData");
        job.setJarByClass(this.getClass());
        //map  读取爬取到的数据  清洗 将符合要求的数据保存到map
        TableMapReduceUtil.initTableMapperJob(inTable, new Scan(), CleanDataMapper.class, ImmutableBytesWritable.class, MapWritable.class, job);
        //reduce 将清洗之后的数据 保存到hbase
        TableMapReduceUtil.initTableReducerJob(outTable, CleanDataReducer.class, job);
        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * Entry point: delegates generic-option parsing and execution to
     * {@link ToolRunner} and propagates the job status as the exit code.
     */
    public static void main(String[] args) throws Exception {
        int exitCode = ToolRunner.run(new Step1_CleanData(), args);
        System.exit(exitCode);
    }
}
