package com.ljw.log;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

import javax.security.auth.login.AppConfigurationEntry;

import java.io.IOException;
import java.text.SimpleDateFormat;

/**
 * Imports tab-separated log files from HDFS into an HBase table via MapReduce.
 * Created by admin on 2017/11/5.
 */
public class HBaseImportApp {

    /**
     * Mapper: parses one tab-separated log line and emits
     * {@code <fileOffset, "rowKey\t<originalLine>">}.
     *
     * <p>The row key is {@code <field0>_<yyyyMMddHHmmss>}, where field 1 of the
     * line is an epoch-millis timestamp. The original line is carried along in
     * the value so the reducer can extract columns from it.
     */
    public static class MyMapper extends Mapper<LongWritable,Text,LongWritable,Text>{
        // Hoisted out of map(): SimpleDateFormat is expensive to construct and
        // a mapper instance is single-threaded, so reuse is safe here.
        private final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
        // Reusable output value object (standard Hadoop pattern to reduce GC churn).
        private final Text outputValue = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            String[] splits = line.split("\t");
            // Defensive: skip malformed records (too few fields or a
            // non-numeric timestamp) instead of failing the whole job.
            if (splits.length < 2) {
                return;
            }
            String data;
            try {
                data = simpleDateFormat.format(Long.parseLong(splits[1].trim()));
            } catch (NumberFormatException ignored) {
                // Timestamp field is not a valid long — drop the record.
                return;
            }
            String rowKey = splits[0].trim() + "_" + data;
            outputValue.set(rowKey + "\t" + line);
            context.write(key, outputValue);
        }
    }
    /**
     * Reducer: writes each mapped record into HBase.
     *
     * <p>Each incoming value has the form {@code "rowKey\t<originalLine>"};
     * the first tab-separated token becomes the HBase row key and the second
     * token is stored in column {@code info:phone}.
     */
    public  static class MyReducer extends TableReducer<LongWritable,Text,NullWritable>{
        // Column family every cell is written under.
        private String cf="info";

        @Override
        protected void reduce(LongWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            byte[] family = Bytes.toBytes(cf);
            byte[] qualifier = Bytes.toBytes("phone");
            for (Text record : values) {
                String[] fields = record.toString().split("\t");
                // fields[0] = row key built by the mapper; fields[1] = phone number.
                Put put = new Put(Bytes.toBytes(fields[0]));
                put.addColumn(family, qualifier, Bytes.toBytes(fields[1]));
                context.write(NullWritable.get(), put);
            }
        }
    }

    /**
     * Job driver. Usage: {@code HBaseImportApp <outputTable> <inputPath>}.
     *
     * <p>Configures an HBase-backed MapReduce job that reads tab-separated log
     * files from HDFS and writes them into the given HBase table.
     *
     * @param args args[0] = target HBase table name, args[1] = HDFS input path(s)
     * @throws Exception if job setup fails or the job does not complete successfully
     */
    public static void main(String[] args) throws Exception {
        if (args.length < 2) {
            throw new IllegalArgumentException("Usage: HBaseImportApp <outputTable> <inputPath>");
        }
//        Configuration configuration = HBaseConfiguration.create();
        Configuration configuration = new Configuration();
        configuration.set("hbase.rootdir","hdfs://192.168.170.223:9000/hbase");
        configuration.set("hbase.cluster.distributed","true");
        configuration.set("hbase.zookeeper.quorum","192.168.170.223:2181");
        configuration.set(TableOutputFormat.OUTPUT_TABLE,args[0]);
        Job job=Job.getInstance(configuration,"importHbase");

//        Set the jar containing this job's classes.
        job.setJarByClass(HBaseImportApp.class);
        TableMapReduceUtil.addDependencyJars(job);
//        Configure the mapper and its output types.
        job.setMapperClass(MyMapper.class);
        job.setMapOutputKeyClass(LongWritable.class);
        // BUG FIX: the original called setMapOutputKeyClass twice, leaving the
        // map output VALUE class unset and overwriting the key class with Text.
        job.setMapOutputValueClass(Text.class);
        job.setReducerClass(MyReducer.class);
        FileInputFormat.addInputPaths(job,args[1]);

        job.setOutputFormatClass(TableOutputFormat.class);
        boolean b =job.waitForCompletion(true);
        if (!b) {
            throw new IOException("error with job!");
        }
    }
}
